diff -Nru couchdb-1.2.0/acinclude.m4 couchdb-1.4.0~rc.1/acinclude.m4 --- couchdb-1.2.0/acinclude.m4 2012-03-29 17:05:31.000000000 -0400 +++ couchdb-1.4.0~rc.1/acinclude.m4 2013-08-23 10:57:21.000000000 -0400 @@ -17,7 +17,7 @@ m4_define([LOCAL_PACKAGE_NAME], [Apache CouchDB]) m4_define([LOCAL_BUG_URI], [https://issues.apache.org/jira/browse/COUCHDB]) m4_define([LOCAL_VERSION_MAJOR], [1]) -m4_define([LOCAL_VERSION_MINOR], [2]) +m4_define([LOCAL_VERSION_MINOR], [4]) m4_define([LOCAL_VERSION_REVISION], [0]) m4_define([LOCAL_VERSION_STAGE], []) m4_define([LOCAL_VERSION_RELEASE], []) diff -Nru couchdb-1.2.0/aclocal.m4 couchdb-1.4.0~rc.1/aclocal.m4 --- couchdb-1.2.0/aclocal.m4 2012-03-29 17:05:35.000000000 -0400 +++ couchdb-1.4.0~rc.1/aclocal.m4 2013-08-23 10:57:37.000000000 -0400 @@ -1,7 +1,8 @@ -# generated automatically by aclocal 1.10 -*- Autoconf -*- +# generated automatically by aclocal 1.11.6 -*- Autoconf -*- # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, -# 2005, 2006 Free Software Foundation, Inc. +# 2005, 2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, +# Inc. # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. @@ -11,27 +12,761 @@ # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. -m4_if(m4_PACKAGE_VERSION, [2.61],, -[m4_fatal([this file was generated for autoconf 2.61. -You have another version of autoconf. If you want to use that, -you should regenerate the build system entirely.], [63])]) +m4_ifndef([AC_AUTOCONF_VERSION], + [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl +m4_if(m4_defn([AC_AUTOCONF_VERSION]), [2.69],, +[m4_warning([this file was generated for autoconf 2.69. +You have another version of autoconf. It may work, but is not guaranteed to. +If you have problems, you may need to regenerate the build system entirely. 
+To do so, use the procedure documented by the package, typically `autoreconf'.])]) -# Copyright (C) 2002, 2003, 2005, 2006 Free Software Foundation, Inc. +# =========================================================================== +# http://www.gnu.org/software/autoconf-archive/ax_check_icu.html +# =========================================================================== +# +# SYNOPSIS +# +# AX_CHECK_ICU(version, action-if, action-if-not) +# +# DESCRIPTION +# +# Defines ICU_LIBS, ICU_CFLAGS, ICU_CXXFLAGS. See icu-config(1) man page. +# +# LICENSE +# +# Copyright (c) 2008 Akos Maroy +# +# Copying and distribution of this file, with or without modification, are +# permitted in any medium without royalty provided the copyright notice +# and this notice are preserved. This file is offered as-is, without any +# warranty. + +#serial 6 + +AU_ALIAS([AC_CHECK_ICU], [AX_CHECK_ICU]) +AC_DEFUN([AX_CHECK_ICU], [ + succeeded=no + + if test -z "$ICU_CONFIG"; then + AC_PATH_PROG(ICU_CONFIG, icu-config, no) + fi + + if test "$ICU_CONFIG" = "no" ; then + echo "*** The icu-config script could not be found. Make sure it is" + echo "*** in your path, and that taglib is properly installed." 
+ echo "*** Or see http://ibm.com/software/globalization/icu/" + else + ICU_VERSION=`$ICU_CONFIG --version` + AC_MSG_CHECKING(for ICU >= $1) + VERSION_CHECK=`expr $ICU_VERSION \>\= $1` + if test "$VERSION_CHECK" = "1" ; then + AC_MSG_RESULT(yes) + succeeded=yes + + AC_MSG_CHECKING(ICU_CPPFLAGS) + ICU_CPPFLAGS=`$ICU_CONFIG --cppflags` + AC_MSG_RESULT($ICU_CPPFLAGS) + + AC_MSG_CHECKING(ICU_CFLAGS) + ICU_CFLAGS=`$ICU_CONFIG --cflags` + AC_MSG_RESULT($ICU_CFLAGS) + + AC_MSG_CHECKING(ICU_CXXFLAGS) + ICU_CXXFLAGS=`$ICU_CONFIG --cxxflags` + AC_MSG_RESULT($ICU_CXXFLAGS) + + AC_MSG_CHECKING(ICU_LIBS) + ICU_LIBS=`$ICU_CONFIG --ldflags` + AC_MSG_RESULT($ICU_LIBS) + else + ICU_CPPFLAGS="" + ICU_CFLAGS="" + ICU_CXXFLAGS="" + ICU_LIBS="" + ## If we have a custom action on failure, don't print errors, but + ## do set a variable so people can do so. + ifelse([$3], ,echo "can't find ICU >= $1",) + fi + + AC_SUBST(ICU_CPPFLAGS) + AC_SUBST(ICU_CFLAGS) + AC_SUBST(ICU_CXXFLAGS) + AC_SUBST(ICU_LIBS) + fi + + if test $succeeded = yes; then + ifelse([$2], , :, [$2]) + else + ifelse([$3], , AC_MSG_ERROR([Library requirements (ICU) not met.]), [$3]) + fi +]) + +# =========================================================================== +# http://www.gnu.org/software/autoconf-archive/ax_compare_version.html +# =========================================================================== +# +# SYNOPSIS +# +# AX_COMPARE_VERSION(VERSION_A, OP, VERSION_B, [ACTION-IF-TRUE], [ACTION-IF-FALSE]) +# +# DESCRIPTION +# +# This macro compares two version strings. Due to the various number of +# minor-version numbers that can exist, and the fact that string +# comparisons are not compatible with numeric comparisons, this is not +# necessarily trivial to do in a autoconf script. This macro makes doing +# these comparisons easy. +# +# The six basic comparisons are available, as well as checking equality +# limited to a certain number of minor-version levels. 
+# +# The operator OP determines what type of comparison to do, and can be one +# of: +# +# eq - equal (test A == B) +# ne - not equal (test A != B) +# le - less than or equal (test A <= B) +# ge - greater than or equal (test A >= B) +# lt - less than (test A < B) +# gt - greater than (test A > B) +# +# Additionally, the eq and ne operator can have a number after it to limit +# the test to that number of minor versions. +# +# eq0 - equal up to the length of the shorter version +# ne0 - not equal up to the length of the shorter version +# eqN - equal up to N sub-version levels +# neN - not equal up to N sub-version levels +# +# When the condition is true, shell commands ACTION-IF-TRUE are run, +# otherwise shell commands ACTION-IF-FALSE are run. The environment +# variable 'ax_compare_version' is always set to either 'true' or 'false' +# as well. +# +# Examples: +# +# AX_COMPARE_VERSION([3.15.7],[lt],[3.15.8]) +# AX_COMPARE_VERSION([3.15],[lt],[3.15.8]) +# +# would both be true. +# +# AX_COMPARE_VERSION([3.15.7],[eq],[3.15.8]) +# AX_COMPARE_VERSION([3.15],[gt],[3.15.8]) +# +# would both be false. +# +# AX_COMPARE_VERSION([3.15.7],[eq2],[3.15.8]) +# +# would be true because it is only comparing two minor versions. +# +# AX_COMPARE_VERSION([3.15.7],[eq0],[3.15]) +# +# would be true because it is only comparing the lesser number of minor +# versions of the two values. +# +# Note: The characters that separate the version numbers do not matter. An +# empty string is the same as version 0. OP is evaluated by autoconf, not +# configure, so must be a string, not a variable. +# +# The author would like to acknowledge Guido Draheim whose advice about +# the m4_case and m4_ifvaln functions make this macro only include the +# portions necessary to perform the specific comparison specified by the +# OP argument in the final configure script. 
+# +# LICENSE +# +# Copyright (c) 2008 Tim Toolan +# +# Copying and distribution of this file, with or without modification, are +# permitted in any medium without royalty provided the copyright notice +# and this notice are preserved. This file is offered as-is, without any +# warranty. + +#serial 11 + +dnl ######################################################################### +AC_DEFUN([AX_COMPARE_VERSION], [ + AC_REQUIRE([AC_PROG_AWK]) + + # Used to indicate true or false condition + ax_compare_version=false + + # Convert the two version strings to be compared into a format that + # allows a simple string comparison. The end result is that a version + # string of the form 1.12.5-r617 will be converted to the form + # 0001001200050617. In other words, each number is zero padded to four + # digits, and non digits are removed. + AS_VAR_PUSHDEF([A],[ax_compare_version_A]) + A=`echo "$1" | sed -e 's/\([[0-9]]*\)/Z\1Z/g' \ + -e 's/Z\([[0-9]]\)Z/Z0\1Z/g' \ + -e 's/Z\([[0-9]][[0-9]]\)Z/Z0\1Z/g' \ + -e 's/Z\([[0-9]][[0-9]][[0-9]]\)Z/Z0\1Z/g' \ + -e 's/[[^0-9]]//g'` + + AS_VAR_PUSHDEF([B],[ax_compare_version_B]) + B=`echo "$3" | sed -e 's/\([[0-9]]*\)/Z\1Z/g' \ + -e 's/Z\([[0-9]]\)Z/Z0\1Z/g' \ + -e 's/Z\([[0-9]][[0-9]]\)Z/Z0\1Z/g' \ + -e 's/Z\([[0-9]][[0-9]][[0-9]]\)Z/Z0\1Z/g' \ + -e 's/[[^0-9]]//g'` + + dnl # In the case of le, ge, lt, and gt, the strings are sorted as necessary + dnl # then the first line is used to determine if the condition is true. + dnl # The sed right after the echo is to remove any indented white space. 
+ m4_case(m4_tolower($2), + [lt],[ + ax_compare_version=`echo "x$A +x$B" | sed 's/^ *//' | sort -r | sed "s/x${A}/false/;s/x${B}/true/;1q"` + ], + [gt],[ + ax_compare_version=`echo "x$A +x$B" | sed 's/^ *//' | sort | sed "s/x${A}/false/;s/x${B}/true/;1q"` + ], + [le],[ + ax_compare_version=`echo "x$A +x$B" | sed 's/^ *//' | sort | sed "s/x${A}/true/;s/x${B}/false/;1q"` + ], + [ge],[ + ax_compare_version=`echo "x$A +x$B" | sed 's/^ *//' | sort -r | sed "s/x${A}/true/;s/x${B}/false/;1q"` + ],[ + dnl Split the operator from the subversion count if present. + m4_bmatch(m4_substr($2,2), + [0],[ + # A count of zero means use the length of the shorter version. + # Determine the number of characters in A and B. + ax_compare_version_len_A=`echo "$A" | $AWK '{print(length)}'` + ax_compare_version_len_B=`echo "$B" | $AWK '{print(length)}'` + + # Set A to no more than B's length and B to no more than A's length. + A=`echo "$A" | sed "s/\(.\{$ax_compare_version_len_B\}\).*/\1/"` + B=`echo "$B" | sed "s/\(.\{$ax_compare_version_len_A\}\).*/\1/"` + ], + [[0-9]+],[ + # A count greater than zero means use only that many subversions + A=`echo "$A" | sed "s/\(\([[0-9]]\{4\}\)\{m4_substr($2,2)\}\).*/\1/"` + B=`echo "$B" | sed "s/\(\([[0-9]]\{4\}\)\{m4_substr($2,2)\}\).*/\1/"` + ], + [.+],[ + AC_WARNING( + [illegal OP numeric parameter: $2]) + ],[]) + + # Pad zeros at end of numbers to make same length. + ax_compare_version_tmp_A="$A`echo $B | sed 's/./0/g'`" + B="$B`echo $A | sed 's/./0/g'`" + A="$ax_compare_version_tmp_A" + + # Check for equality or inequality as necessary. + m4_case(m4_tolower(m4_substr($2,0,2)), + [eq],[ + test "x$A" = "x$B" && ax_compare_version=true + ], + [ne],[ + test "x$A" != "x$B" && ax_compare_version=true + ],[ + AC_WARNING([illegal OP parameter: $2]) + ]) + ]) + + AS_VAR_POPDEF([A])dnl + AS_VAR_POPDEF([B])dnl + + dnl # Execute ACTION-IF-TRUE / ACTION-IF-FALSE. 
+ if test "$ax_compare_version" = "true" ; then + m4_ifvaln([$4],[$4],[:])dnl + m4_ifvaln([$5],[else $5])dnl + fi +]) dnl AX_COMPARE_VERSION + +# =========================================================================== +# http://www.gnu.org/software/autoconf-archive/ax_lib_curl.html +# =========================================================================== +# +# SYNOPSIS +# +# AX_LIB_CURL([VERSION],[ACTION-IF-SUCCESS],[ACTION-IF-FAILURE]) +# +# DESCRIPTION +# +# Checks for minimum curl library version VERSION. If successfull executes +# ACTION-IF-SUCCESS otherwise ACTION-IF-FAILURE. +# +# Defines CURL_LIBS and CURL_CFLAGS. +# +# A simple example: +# +# AX_LIB_CURL([7.19.4],,[ +# AC_MSG_ERROR([Your system lacks of libcurl >= 7.19.4]) +# ]) +# +# This macro is a rearranged version of AC_LIB_CURL from Akos Maroy. +# +# LICENSE +# +# Copyright (c) 2009 Francesco Salvestrini +# +# Copying and distribution of this file, with or without modification, are +# permitted in any medium without royalty provided the copyright notice +# and this notice are preserved. This file is offered as-is, without any +# warranty. + +#serial 7 + +AU_ALIAS([AC_CHECK_CURL], [AX_LIB_CURL]) +AC_DEFUN([AX_LIB_CURL], [ + AX_PATH_GENERIC([curl],[$1],'s/^libcurl\ \+//',[$2],[$3]) +]) + +# =========================================================================== +# http://www.gnu.org/software/autoconf-archive/ax_path_generic.html +# =========================================================================== +# +# SYNOPSIS +# +# AX_PATH_GENERIC(LIBRARY,[MINIMUM-VERSION,[SED-EXPR-EXTRACTOR]],[ACTION-IF-FOUND],[ACTION-IF-NOT-FOUND],[CONFIG-SCRIPTS],[CFLAGS-ARG],[LIBS-ARG]) +# +# DESCRIPTION +# +# Runs the LIBRARY-config script and defines LIBRARY_CFLAGS and +# LIBRARY_LIBS unless the user had predefined them in the environment. +# +# The script must support `--cflags' and `--libs' args. If MINIMUM-VERSION +# is specified, the script must also support the `--version' arg. 
If the +# `--with-library-[exec-]prefix' arguments to ./configure are given, it +# must also support `--prefix' and `--exec-prefix'. Prefereable use +# CONFIG-SCRIPTS as config script, CFLAGS-ARG instead of `--cflags` and +# LIBS-ARG instead of `--libs`, if given. +# +# The SED-EXPR-EXTRACTOR parameter representes the expression used in sed +# to extract the version number. Use it if your 'foo-config --version' +# dumps something like 'Foo library v1.0.0 (alfa)' instead of '1.0.0'. +# +# The macro respects LIBRARY_CONFIG, LIBRARY_CFLAGS and LIBRARY_LIBS +# variables. If the first one is defined, it specifies the name of the +# config script to use. If the latter two are defined, the script is not +# ran at all and their values are used instead (if only one of them is +# defined, the empty value of the remaining one is still used). +# +# Example: +# +# AX_PATH_GENERIC(Foo, 1.0.0) +# +# would run `foo-config --version' and check that it is at least 1.0.0, if +# successful the following variables would be defined and substituted: +# +# FOO_CFLAGS to `foo-config --cflags` +# FOO_LIBS to `foo-config --libs` +# +# Example: +# +# AX_PATH_GENERIC([Bar],,,[ +# AC_MSG_ERROR([Cannot find Bar library]) +# ]) +# +# would check for bar-config program, defining and substituting the +# following variables: +# +# BAR_CFLAGS to `bar-config --cflags` +# BAR_LIBS to `bar-config --libs` +# +# Example: +# +# ./configure BAZ_LIBS=/usr/lib/libbaz.a +# +# would link with a static version of baz library even if `baz-config +# --libs` returns just "-lbaz" that would normally result in using the +# shared library. +# +# This macro is a rearranged version of AC_PATH_GENERIC from Angus Lees. +# +# LICENSE +# +# Copyright (c) 2009 Francesco Salvestrini +# +# Copying and distribution of this file, with or without modification, are +# permitted in any medium without royalty provided the copyright notice +# and this notice are preserved. This file is offered as-is, without any +# warranty. 
+ +#serial 11 + +AU_ALIAS([AC_PATH_GENERIC], [AX_PATH_GENERIC]) +AC_DEFUN([AX_PATH_GENERIC],[ + AC_REQUIRE([AC_PROG_SED]) + + dnl we're going to need uppercase and lowercase versions of the + dnl string `LIBRARY' + pushdef([UP], translit([$1], [a-z], [A-Z]))dnl + pushdef([DOWN], translit([$1], [A-Z], [a-z]))dnl + + AC_ARG_WITH(DOWN-prefix,[AS_HELP_STRING([--with-]DOWN[-prefix=PREFIX], [Prefix where $1 is installed (optional)])], + DOWN[]_config_prefix="$withval", DOWN[]_config_prefix="") + AC_ARG_WITH(DOWN-exec-prefix,[AS_HELP_STRING([--with-]DOWN[-exec-prefix=EPREFIX], [Exec prefix where $1 is installed (optional)])], + DOWN[]_config_exec_prefix="$withval", DOWN[]_config_exec_prefix="") + + AC_ARG_VAR(UP[]_CONFIG, [config script used for $1]) + AC_ARG_VAR(UP[]_CFLAGS, [CFLAGS used for $1]) + AC_ARG_VAR(UP[]_LIBS, [LIBS used for $1]) + + AS_IF([test x$UP[]_CFLAGS != x -o x$UP[]_LIBS != x],[ + dnl Don't run config script at all, use user-provided values instead. + AC_SUBST(UP[]_CFLAGS) + AC_SUBST(UP[]_LIBS) + : + $4 + ],[ + AS_IF([test x$DOWN[]_config_exec_prefix != x],[ + DOWN[]_config_args="$DOWN[]_config_args --exec-prefix=$DOWN[]_config_exec_prefix" + AS_IF([test x${UP[]_CONFIG+set} != xset],[ + UP[]_CONFIG=$DOWN[]_config_exec_prefix/bin/DOWN-config + ]) + ]) + AS_IF([test x$DOWN[]_config_prefix != x],[ + DOWN[]_config_args="$DOWN[]_config_args --prefix=$DOWN[]_config_prefix" + AS_IF([test x${UP[]_CONFIG+set} != xset],[ + UP[]_CONFIG=$DOWN[]_config_prefix/bin/DOWN-config + ]) + ]) + + AC_PATH_PROGS(UP[]_CONFIG,[$6 DOWN-config],[no]) + AS_IF([test "$UP[]_CONFIG" == "no"],[ + : + $5 + ],[ + dnl Get the CFLAGS from LIBRARY-config script + AS_IF([test x"$7" == x],[ + UP[]_CFLAGS="`$UP[]_CONFIG $DOWN[]_config_args --cflags`" + ],[ + UP[]_CFLAGS="`$UP[]_CONFIG $DOWN[]_config_args $7`" + ]) + + dnl Get the LIBS from LIBRARY-config script + AS_IF([test x"$8" == x],[ + UP[]_LIBS="`$UP[]_CONFIG $DOWN[]_config_args --libs`" + ],[ + UP[]_LIBS="`$UP[]_CONFIG 
$DOWN[]_config_args $8`" + ]) + + AS_IF([test x"$2" != x],[ + dnl Check for provided library version + AS_IF([test x"$3" != x],[ + dnl Use provided sed expression + DOWN[]_version="`$UP[]_CONFIG $DOWN[]_config_args --version | $SED -e $3`" + ],[ + DOWN[]_version="`$UP[]_CONFIG $DOWN[]_config_args --version | $SED -e 's/^\ *\(.*\)\ *$/\1/'`" + ]) + + AC_MSG_CHECKING([for $1 ($DOWN[]_version) >= $2]) + AX_COMPARE_VERSION($DOWN[]_version,[ge],[$2],[ + AC_MSG_RESULT([yes]) + + AC_SUBST(UP[]_CFLAGS) + AC_SUBST(UP[]_LIBS) + : + $4 + ],[ + AC_MSG_RESULT([no]) + : + $5 + ]) + ],[ + AC_SUBST(UP[]_CFLAGS) + AC_SUBST(UP[]_LIBS) + : + $4 + ]) + ]) + ]) + + popdef([UP]) + popdef([DOWN]) +]) + +# =========================================================================== +# http://www.gnu.org/software/autoconf-archive/ax_python_module.html +# =========================================================================== +# +# SYNOPSIS +# +# AX_PYTHON_MODULE(modname[, fatal]) +# +# DESCRIPTION +# +# Checks for Python module. +# +# If fatal is non-empty then absence of a module will trigger an error. +# +# LICENSE +# +# Copyright (c) 2008 Andrew Collier +# +# Copying and distribution of this file, with or without modification, are +# permitted in any medium without royalty provided the copyright notice +# and this notice are preserved. This file is offered as-is, without any +# warranty. + +#serial 5 + +AU_ALIAS([AC_PYTHON_MODULE], [AX_PYTHON_MODULE]) +AC_DEFUN([AX_PYTHON_MODULE],[ + if test -z $PYTHON; + then + PYTHON="python" + fi + PYTHON_NAME=`basename $PYTHON` + AC_MSG_CHECKING($PYTHON_NAME module: $1) + $PYTHON -c "import $1" 2>/dev/null + if test $? -eq 0; + then + AC_MSG_RESULT(yes) + eval AS_TR_CPP(HAVE_PYMOD_$1)=yes + else + AC_MSG_RESULT(no) + eval AS_TR_CPP(HAVE_PYMOD_$1)=no + # + if test -n "$2" + then + AC_MSG_ERROR(failed to find required module $1) + exit 1 + fi + fi +]) + +# pkg.m4 - Macros to locate and utilise pkg-config. 
-*- Autoconf -*- +# serial 1 (pkg-config-0.24) +# +# Copyright © 2004 Scott James Remnant . +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# +# As a special exception to the GNU General Public License, if you +# distribute this file as part of a program that contains a +# configuration script generated by Autoconf, you may include it under +# the same distribution terms that you use for the rest of that program. 
+ +# PKG_PROG_PKG_CONFIG([MIN-VERSION]) +# ---------------------------------- +AC_DEFUN([PKG_PROG_PKG_CONFIG], +[m4_pattern_forbid([^_?PKG_[A-Z_]+$]) +m4_pattern_allow([^PKG_CONFIG(_(PATH|LIBDIR|SYSROOT_DIR|ALLOW_SYSTEM_(CFLAGS|LIBS)))?$]) +m4_pattern_allow([^PKG_CONFIG_(DISABLE_UNINSTALLED|TOP_BUILD_DIR|DEBUG_SPEW)$]) +AC_ARG_VAR([PKG_CONFIG], [path to pkg-config utility]) +AC_ARG_VAR([PKG_CONFIG_PATH], [directories to add to pkg-config's search path]) +AC_ARG_VAR([PKG_CONFIG_LIBDIR], [path overriding pkg-config's built-in search path]) + +if test "x$ac_cv_env_PKG_CONFIG_set" != "xset"; then + AC_PATH_TOOL([PKG_CONFIG], [pkg-config]) +fi +if test -n "$PKG_CONFIG"; then + _pkg_min_version=m4_default([$1], [0.9.0]) + AC_MSG_CHECKING([pkg-config is at least version $_pkg_min_version]) + if $PKG_CONFIG --atleast-pkgconfig-version $_pkg_min_version; then + AC_MSG_RESULT([yes]) + else + AC_MSG_RESULT([no]) + PKG_CONFIG="" + fi +fi[]dnl +])# PKG_PROG_PKG_CONFIG + +# PKG_CHECK_EXISTS(MODULES, [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND]) +# +# Check to see whether a particular set of modules exists. Similar +# to PKG_CHECK_MODULES(), but does not set variables or print errors. 
+# +# Please remember that m4 expands AC_REQUIRE([PKG_PROG_PKG_CONFIG]) +# only at the first occurence in configure.ac, so if the first place +# it's called might be skipped (such as if it is within an "if", you +# have to call PKG_CHECK_EXISTS manually +# -------------------------------------------------------------- +AC_DEFUN([PKG_CHECK_EXISTS], +[AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl +if test -n "$PKG_CONFIG" && \ + AC_RUN_LOG([$PKG_CONFIG --exists --print-errors "$1"]); then + m4_default([$2], [:]) +m4_ifvaln([$3], [else + $3])dnl +fi]) + +# _PKG_CONFIG([VARIABLE], [COMMAND], [MODULES]) +# --------------------------------------------- +m4_define([_PKG_CONFIG], +[if test -n "$$1"; then + pkg_cv_[]$1="$$1" + elif test -n "$PKG_CONFIG"; then + PKG_CHECK_EXISTS([$3], + [pkg_cv_[]$1=`$PKG_CONFIG --[]$2 "$3" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes ], + [pkg_failed=yes]) + else + pkg_failed=untried +fi[]dnl +])# _PKG_CONFIG + +# _PKG_SHORT_ERRORS_SUPPORTED +# ----------------------------- +AC_DEFUN([_PKG_SHORT_ERRORS_SUPPORTED], +[AC_REQUIRE([PKG_PROG_PKG_CONFIG]) +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes +else + _pkg_short_errors_supported=no +fi[]dnl +])# _PKG_SHORT_ERRORS_SUPPORTED + + +# PKG_CHECK_MODULES(VARIABLE-PREFIX, MODULES, [ACTION-IF-FOUND], +# [ACTION-IF-NOT-FOUND]) +# +# +# Note that if there is a possibility the first call to +# PKG_CHECK_MODULES might not happen, you should be sure to include an +# explicit call to PKG_PROG_PKG_CONFIG in your configure.ac +# +# +# -------------------------------------------------------------- +AC_DEFUN([PKG_CHECK_MODULES], +[AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl +AC_ARG_VAR([$1][_CFLAGS], [C compiler flags for $1, overriding pkg-config])dnl +AC_ARG_VAR([$1][_LIBS], [linker flags for $1, overriding pkg-config])dnl + +pkg_failed=no +AC_MSG_CHECKING([for $1]) + +_PKG_CONFIG([$1][_CFLAGS], [cflags], [$2]) +_PKG_CONFIG([$1][_LIBS], [libs], [$2]) + 
+m4_define([_PKG_TEXT], [Alternatively, you may set the environment variables $1[]_CFLAGS +and $1[]_LIBS to avoid the need to call pkg-config. +See the pkg-config man page for more details.]) + +if test $pkg_failed = yes; then + AC_MSG_RESULT([no]) + _PKG_SHORT_ERRORS_SUPPORTED + if test $_pkg_short_errors_supported = yes; then + $1[]_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$2" 2>&1` + else + $1[]_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$2" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$$1[]_PKG_ERRORS" >&AS_MESSAGE_LOG_FD + + m4_default([$4], [AC_MSG_ERROR( +[Package requirements ($2) were not met: + +$$1_PKG_ERRORS + +Consider adjusting the PKG_CONFIG_PATH environment variable if you +installed software in a non-standard prefix. + +_PKG_TEXT])[]dnl + ]) +elif test $pkg_failed = untried; then + AC_MSG_RESULT([no]) + m4_default([$4], [AC_MSG_FAILURE( +[The pkg-config script could not be found or is too old. Make sure it +is in your PATH or set the PKG_CONFIG environment variable to the full +path to pkg-config. + +_PKG_TEXT + +To get pkg-config, see .])[]dnl + ]) +else + $1[]_CFLAGS=$pkg_cv_[]$1[]_CFLAGS + $1[]_LIBS=$pkg_cv_[]$1[]_LIBS + AC_MSG_RESULT([yes]) + $3 +fi[]dnl +])# PKG_CHECK_MODULES + + +# PKG_INSTALLDIR(DIRECTORY) +# ------------------------- +# Substitutes the variable pkgconfigdir as the location where a module +# should install pkg-config .pc files. By default the directory is +# $libdir/pkgconfig, but the default can be changed by passing +# DIRECTORY. The user can override through the --with-pkgconfigdir +# parameter. 
+AC_DEFUN([PKG_INSTALLDIR], +[m4_pushdef([pkg_default], [m4_default([$1], ['${libdir}/pkgconfig'])]) +m4_pushdef([pkg_description], + [pkg-config installation directory @<:@]pkg_default[@:>@]) +AC_ARG_WITH([pkgconfigdir], + [AS_HELP_STRING([--with-pkgconfigdir], pkg_description)],, + [with_pkgconfigdir=]pkg_default) +AC_SUBST([pkgconfigdir], [$with_pkgconfigdir]) +m4_popdef([pkg_default]) +m4_popdef([pkg_description]) +]) dnl PKG_INSTALLDIR + + +# PKG_NOARCH_INSTALLDIR(DIRECTORY) +# ------------------------- +# Substitutes the variable noarch_pkgconfigdir as the location where a +# module should install arch-independent pkg-config .pc files. By +# default the directory is $datadir/pkgconfig, but the default can be +# changed by passing DIRECTORY. The user can override through the +# --with-noarch-pkgconfigdir parameter. +AC_DEFUN([PKG_NOARCH_INSTALLDIR], +[m4_pushdef([pkg_default], [m4_default([$1], ['${datadir}/pkgconfig'])]) +m4_pushdef([pkg_description], + [pkg-config arch-independent installation directory @<:@]pkg_default[@:>@]) +AC_ARG_WITH([noarch-pkgconfigdir], + [AS_HELP_STRING([--with-noarch-pkgconfigdir], pkg_description)],, + [with_noarch_pkgconfigdir=]pkg_default) +AC_SUBST([noarch_pkgconfigdir], [$with_noarch_pkgconfigdir]) +m4_popdef([pkg_default]) +m4_popdef([pkg_description]) +]) dnl PKG_NOARCH_INSTALLDIR + + +# PKG_CHECK_VAR(VARIABLE, MODULE, CONFIG-VARIABLE, +# [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND]) +# ------------------------------------------- +# Retrieves the value of the pkg-config variable for the given module. +AC_DEFUN([PKG_CHECK_VAR], +[AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl +AC_ARG_VAR([$1], [value of $3 for $2, overriding pkg-config])dnl + +_PKG_CONFIG([$1], [variable="][$3]["], [$2]) +AS_VAR_COPY([$1], [pkg_cv_][$1]) + +AS_VAR_IF([$1], [""], [$5], [$4])dnl +])# PKG_CHECK_VAR + +# Copyright (C) 2002, 2003, 2005, 2006, 2007, 2008, 2011 Free Software +# Foundation, Inc. 
# # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. +# serial 1 + # AM_AUTOMAKE_VERSION(VERSION) # ---------------------------- # Automake X.Y traces this macro to ensure aclocal.m4 has been # generated from the m4 files accompanying Automake X.Y. # (This private macro should not be called outside this file.) AC_DEFUN([AM_AUTOMAKE_VERSION], -[am__api_version='1.10' +[am__api_version='1.11' dnl Some users find AM_AUTOMAKE_VERSION and mistake it for a way to dnl require some minimum version. Point them to the right macro. -m4_if([$1], [1.10], [], +m4_if([$1], [1.11.6], [], [AC_FATAL([Do not call $0, use AM_INIT_AUTOMAKE([$1]).])])dnl ]) @@ -45,19 +780,23 @@ # AM_SET_CURRENT_AUTOMAKE_VERSION # ------------------------------- # Call AM_AUTOMAKE_VERSION and AM_AUTOMAKE_VERSION so they can be traced. -# This function is AC_REQUIREd by AC_INIT_AUTOMAKE. +# This function is AC_REQUIREd by AM_INIT_AUTOMAKE. AC_DEFUN([AM_SET_CURRENT_AUTOMAKE_VERSION], -[AM_AUTOMAKE_VERSION([1.10])dnl -_AM_AUTOCONF_VERSION(m4_PACKAGE_VERSION)]) +[AM_AUTOMAKE_VERSION([1.11.6])dnl +m4_ifndef([AC_AUTOCONF_VERSION], + [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl +_AM_AUTOCONF_VERSION(m4_defn([AC_AUTOCONF_VERSION]))]) # AM_AUX_DIR_EXPAND -*- Autoconf -*- -# Copyright (C) 2001, 2003, 2005 Free Software Foundation, Inc. +# Copyright (C) 2001, 2003, 2005, 2011 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. +# serial 1 + # For projects using AC_CONFIG_AUX_DIR([foo]), Autoconf sets # $ac_aux_dir to `$srcdir/foo'. In other projects, it is set to # `$srcdir', `$srcdir/..', or `$srcdir/../..'. 
@@ -105,14 +844,14 @@ # AM_CONDITIONAL -*- Autoconf -*- -# Copyright (C) 1997, 2000, 2001, 2003, 2004, 2005, 2006 +# Copyright (C) 1997, 2000, 2001, 2003, 2004, 2005, 2006, 2008 # Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. -# serial 8 +# serial 9 # AM_CONDITIONAL(NAME, SHELL-CONDITION) # ------------------------------------- @@ -125,6 +864,7 @@ AC_SUBST([$1_FALSE])dnl _AM_SUBST_NOTMAKE([$1_TRUE])dnl _AM_SUBST_NOTMAKE([$1_FALSE])dnl +m4_define([_AM_COND_VALUE_$1], [$2])dnl if $2; then $1_TRUE= $1_FALSE='#' @@ -138,14 +878,14 @@ Usually this means the macro was only invoked conditionally.]]) fi])]) -# Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 -# Free Software Foundation, Inc. +# Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2009, +# 2010, 2011 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. -# serial 9 +# serial 12 # There are a few dirty hacks below to avoid letting `AC_PROG_CC' be # written in clear, in which case automake, when reading aclocal.m4, @@ -185,6 +925,7 @@ # instance it was reported that on HP-UX the gcc test will end up # making a dummy file named `D' -- because `-MD' means `put the output # in D'. + rm -rf conftest.dir mkdir conftest.dir # Copy depcomp to subdir because otherwise we won't find it if we're # using a relative directory. 
@@ -202,6 +943,16 @@ if test "$am_compiler_list" = ""; then am_compiler_list=`sed -n ['s/^#*\([a-zA-Z0-9]*\))$/\1/p'] < ./depcomp` fi + am__universal=false + m4_case([$1], [CC], + [case " $depcc " in #( + *\ -arch\ *\ -arch\ *) am__universal=true ;; + esac], + [CXX], + [case " $depcc " in #( + *\ -arch\ *\ -arch\ *) am__universal=true ;; + esac]) + for depmode in $am_compiler_list; do # Setup a source with many dependencies, because some compilers # like to wrap large dependency lists on column 80 (with \), and @@ -219,7 +970,17 @@ done echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf + # We check with `-c' and `-o' for the sake of the "dashmstdout" + # mode. It turns out that the SunPro C++ compiler does not properly + # handle `-M -o', and we need to detect this. Also, some Intel + # versions had trouble with output in subdirs + am__obj=sub/conftest.${OBJEXT-o} + am__minus_obj="-o $am__obj" case $depmode in + gcc) + # This depmode causes a compiler race in universal mode. + test "$am__universal" = false || continue + ;; nosideeffect) # after this tag, mechanisms are not by side-effect, so they'll # only be used when explicitly requested @@ -229,19 +990,23 @@ break fi ;; + msvc7 | msvc7msys | msvisualcpp | msvcmsys) + # This compiler won't grok `-c -o', but also, the minuso test has + # not run yet. These depmodes are late enough in the game, and + # so weak that their functioning should not be impacted. + am__obj=conftest.${OBJEXT-o} + am__minus_obj= + ;; none) break ;; esac - # We check with `-c' and `-o' for the sake of the "dashmstdout" - # mode. It turns out that the SunPro C++ compiler does not properly - # handle `-M -o', and we need to detect this. 
if depmode=$depmode \ - source=sub/conftest.c object=sub/conftest.${OBJEXT-o} \ + source=sub/conftest.c object=$am__obj \ depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ - $SHELL ./depcomp $depcc -c -o sub/conftest.${OBJEXT-o} sub/conftest.c \ + $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ >/dev/null 2>conftest.err && grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && - grep sub/conftest.${OBJEXT-o} sub/conftest.Po > /dev/null 2>&1 && + grep $am__obj sub/conftest.Po > /dev/null 2>&1 && ${MAKE-make} -s -f confmf > /dev/null 2>&1; then # icc doesn't choke on unknown options, it will just issue warnings # or remarks (even with -Werror). So we grep stderr for any message @@ -290,65 +1055,79 @@ if test "x$enable_dependency_tracking" != xno; then am_depcomp="$ac_aux_dir/depcomp" AMDEPBACKSLASH='\' + am__nodep='_no' fi AM_CONDITIONAL([AMDEP], [test "x$enable_dependency_tracking" != xno]) AC_SUBST([AMDEPBACKSLASH])dnl _AM_SUBST_NOTMAKE([AMDEPBACKSLASH])dnl +AC_SUBST([am__nodep])dnl +_AM_SUBST_NOTMAKE([am__nodep])dnl ]) # Generate code to set up dependency tracking. -*- Autoconf -*- -# Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005 +# Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2008 # Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. -#serial 3 +#serial 5 # _AM_OUTPUT_DEPENDENCY_COMMANDS # ------------------------------ AC_DEFUN([_AM_OUTPUT_DEPENDENCY_COMMANDS], -[for mf in $CONFIG_FILES; do - # Strip MF so we end up with the name of the file. - mf=`echo "$mf" | sed -e 's/:.*$//'` - # Check whether this is an Automake generated Makefile or not. - # We used to match only the files named `Makefile.in', but - # some people rename them; so instead we look at the file content. 
- # Grep'ing the first line is not enough: some people post-process - # each Makefile.in and add a new line on top of each file to say so. - # Grep'ing the whole file is not good either: AIX grep has a line - # limit of 2048, but all sed's we know have understand at least 4000. - if sed 10q "$mf" | grep '^#.*generated by automake' > /dev/null 2>&1; then - dirpart=`AS_DIRNAME("$mf")` - else - continue - fi - # Extract the definition of DEPDIR, am__include, and am__quote - # from the Makefile without running `make'. - DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"` - test -z "$DEPDIR" && continue - am__include=`sed -n 's/^am__include = //p' < "$mf"` - test -z "am__include" && continue - am__quote=`sed -n 's/^am__quote = //p' < "$mf"` - # When using ansi2knr, U may be empty or an underscore; expand it - U=`sed -n 's/^U = //p' < "$mf"` - # Find all dependency output files, they are included files with - # $(DEPDIR) in their names. We invoke sed twice because it is the - # simplest approach to changing $(DEPDIR) to its actual value in the - # expansion. - for file in `sed -n " - s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \ - sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g' -e 's/\$U/'"$U"'/g'`; do - # Make sure the directory exists. - test -f "$dirpart/$file" && continue - fdir=`AS_DIRNAME(["$file"])` - AS_MKDIR_P([$dirpart/$fdir]) - # echo "creating $dirpart/$file" - echo '# dummy' > "$dirpart/$file" +[{ + # Autoconf 2.62 quotes --file arguments for eval, but not when files + # are listed without --file. Let's play safe and only enable the eval + # if we detect the quoting. + case $CONFIG_FILES in + *\'*) eval set x "$CONFIG_FILES" ;; + *) set x $CONFIG_FILES ;; + esac + shift + for mf + do + # Strip MF so we end up with the name of the file. + mf=`echo "$mf" | sed -e 's/:.*$//'` + # Check whether this is an Automake generated Makefile or not. 
+ # We used to match only the files named `Makefile.in', but + # some people rename them; so instead we look at the file content. + # Grep'ing the first line is not enough: some people post-process + # each Makefile.in and add a new line on top of each file to say so. + # Grep'ing the whole file is not good either: AIX grep has a line + # limit of 2048, but all sed's we know have understand at least 4000. + if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then + dirpart=`AS_DIRNAME("$mf")` + else + continue + fi + # Extract the definition of DEPDIR, am__include, and am__quote + # from the Makefile without running `make'. + DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"` + test -z "$DEPDIR" && continue + am__include=`sed -n 's/^am__include = //p' < "$mf"` + test -z "am__include" && continue + am__quote=`sed -n 's/^am__quote = //p' < "$mf"` + # When using ansi2knr, U may be empty or an underscore; expand it + U=`sed -n 's/^U = //p' < "$mf"` + # Find all dependency output files, they are included files with + # $(DEPDIR) in their names. We invoke sed twice because it is the + # simplest approach to changing $(DEPDIR) to its actual value in the + # expansion. + for file in `sed -n " + s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \ + sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g' -e 's/\$U/'"$U"'/g'`; do + # Make sure the directory exists. + test -f "$dirpart/$file" && continue + fdir=`AS_DIRNAME(["$file"])` + AS_MKDIR_P([$dirpart/$fdir]) + # echo "creating $dirpart/$file" + echo '# dummy' > "$dirpart/$file" + done done -done +} ])# _AM_OUTPUT_DEPENDENCY_COMMANDS @@ -365,28 +1144,16 @@ [AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir"]) ]) -# Copyright (C) 1996, 1997, 2000, 2001, 2003, 2005 -# Free Software Foundation, Inc. -# -# This file is free software; the Free Software Foundation -# gives unlimited permission to copy and/or distribute it, -# with or without modifications, as long as this notice is preserved. 
- -# serial 8 - -# AM_CONFIG_HEADER is obsolete. It has been replaced by AC_CONFIG_HEADERS. -AU_DEFUN([AM_CONFIG_HEADER], [AC_CONFIG_HEADERS($@)]) - # Do all the work for Automake. -*- Autoconf -*- # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, -# 2005, 2006 Free Software Foundation, Inc. +# 2005, 2006, 2008, 2009 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. -# serial 12 +# serial 16 # This macro actually does too much. Some checks are only needed if # your package does certain things. But this isn't really a big deal. @@ -403,7 +1170,7 @@ # arguments mandatory, and then we can depend on a new Autoconf # release and drop the old call support. AC_DEFUN([AM_INIT_AUTOMAKE], -[AC_PREREQ([2.60])dnl +[AC_PREREQ([2.62])dnl dnl Autoconf wants to disallow AM_ names. We explicitly allow dnl the ones we care about. m4_pattern_allow([^AM_[A-Z]+FLAGS$])dnl @@ -454,8 +1221,8 @@ AM_MISSING_PROG(AUTOMAKE, automake-${am__api_version}) AM_MISSING_PROG(AUTOHEADER, autoheader) AM_MISSING_PROG(MAKEINFO, makeinfo) -AM_PROG_INSTALL_SH -AM_PROG_INSTALL_STRIP +AC_REQUIRE([AM_PROG_INSTALL_SH])dnl +AC_REQUIRE([AM_PROG_INSTALL_STRIP])dnl AC_REQUIRE([AM_PROG_MKDIR_P])dnl # We need awk for the "check" target. The system "awk" is bad on # some platforms. 
@@ -463,24 +1230,37 @@ AC_REQUIRE([AC_PROG_MAKE_SET])dnl AC_REQUIRE([AM_SET_LEADING_DOT])dnl _AM_IF_OPTION([tar-ustar], [_AM_PROG_TAR([ustar])], - [_AM_IF_OPTION([tar-pax], [_AM_PROG_TAR([pax])], - [_AM_PROG_TAR([v7])])]) + [_AM_IF_OPTION([tar-pax], [_AM_PROG_TAR([pax])], + [_AM_PROG_TAR([v7])])]) _AM_IF_OPTION([no-dependencies],, [AC_PROVIDE_IFELSE([AC_PROG_CC], - [_AM_DEPENDENCIES(CC)], - [define([AC_PROG_CC], - defn([AC_PROG_CC])[_AM_DEPENDENCIES(CC)])])dnl + [_AM_DEPENDENCIES(CC)], + [define([AC_PROG_CC], + defn([AC_PROG_CC])[_AM_DEPENDENCIES(CC)])])dnl AC_PROVIDE_IFELSE([AC_PROG_CXX], - [_AM_DEPENDENCIES(CXX)], - [define([AC_PROG_CXX], - defn([AC_PROG_CXX])[_AM_DEPENDENCIES(CXX)])])dnl + [_AM_DEPENDENCIES(CXX)], + [define([AC_PROG_CXX], + defn([AC_PROG_CXX])[_AM_DEPENDENCIES(CXX)])])dnl AC_PROVIDE_IFELSE([AC_PROG_OBJC], - [_AM_DEPENDENCIES(OBJC)], - [define([AC_PROG_OBJC], - defn([AC_PROG_OBJC])[_AM_DEPENDENCIES(OBJC)])])dnl + [_AM_DEPENDENCIES(OBJC)], + [define([AC_PROG_OBJC], + defn([AC_PROG_OBJC])[_AM_DEPENDENCIES(OBJC)])])dnl ]) +_AM_IF_OPTION([silent-rules], [AC_REQUIRE([AM_SILENT_RULES])])dnl +dnl The `parallel-tests' driver may need to know about EXEEXT, so add the +dnl `am__EXEEXT' conditional if _AM_COMPILER_EXEEXT was seen. This macro +dnl is hooked onto _AC_COMPILER_EXEEXT early, see below. +AC_CONFIG_COMMANDS_PRE(dnl +[m4_provide_if([_AM_COMPILER_EXEEXT], + [AM_CONDITIONAL([am__EXEEXT], [test -n "$EXEEXT"])])])dnl ]) +dnl Hook into `_AC_COMPILER_EXEEXT' early to learn its expansion. Do not +dnl add the conditional right here, as _AC_COMPILER_EXEEXT may be further +dnl mangled by Autoconf and run in a shell conditional statement. +m4_define([_AC_COMPILER_EXEEXT], +m4_defn([_AC_COMPILER_EXEEXT])[m4_provide([_AM_COMPILER_EXEEXT])]) + # When config.status generates a header, we must update the stamp-h file. # This file resides in the same directory as the config header @@ -491,29 +1271,40 @@ # our stamp files there. 
AC_DEFUN([_AC_AM_CONFIG_HEADER_HOOK], [# Compute $1's index in $config_headers. +_am_arg=$1 _am_stamp_count=1 for _am_header in $config_headers :; do case $_am_header in - $1 | $1:* ) + $_am_arg | $_am_arg:* ) break ;; * ) _am_stamp_count=`expr $_am_stamp_count + 1` ;; esac done -echo "timestamp for $1" >`AS_DIRNAME([$1])`/stamp-h[]$_am_stamp_count]) +echo "timestamp for $_am_arg" >`AS_DIRNAME(["$_am_arg"])`/stamp-h[]$_am_stamp_count]) -# Copyright (C) 2001, 2003, 2005 Free Software Foundation, Inc. +# Copyright (C) 2001, 2003, 2005, 2008, 2011 Free Software Foundation, +# Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. +# serial 1 + # AM_PROG_INSTALL_SH # ------------------ # Define $install_sh. AC_DEFUN([AM_PROG_INSTALL_SH], [AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl -install_sh=${install_sh-"\$(SHELL) $am_aux_dir/install-sh"} +if test x"${install_sh}" != xset; then + case $am_aux_dir in + *\ * | *\ *) + install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;; + *) + install_sh="\${SHELL} $am_aux_dir/install-sh" + esac +fi AC_SUBST(install_sh)]) # Copyright (C) 2003, 2005 Free Software Foundation, Inc. @@ -539,13 +1330,13 @@ # Check to see how 'make' treats includes. -*- Autoconf -*- -# Copyright (C) 2001, 2002, 2003, 2005 Free Software Foundation, Inc. +# Copyright (C) 2001, 2002, 2003, 2005, 2009 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. -# serial 3 +# serial 4 # AM_MAKE_INCLUDE() # ----------------- @@ -554,7 +1345,7 @@ [am_make=${MAKE-make} cat > confinc << 'END' am__doit: - @echo done + @echo this is the am__doit target .PHONY: am__doit END # If we don't find an include directive, just comment out the code. 
@@ -564,24 +1355,24 @@ _am_result=none # First try GNU make style include. echo "include confinc" > confmf -# We grep out `Entering directory' and `Leaving directory' -# messages which can occur if `w' ends up in MAKEFLAGS. -# In particular we don't look at `^make:' because GNU make might -# be invoked under some other name (usually "gmake"), in which -# case it prints its new name instead of `make'. -if test "`$am_make -s -f confmf 2> /dev/null | grep -v 'ing directory'`" = "done"; then - am__include=include - am__quote= - _am_result=GNU -fi +# Ignore all kinds of additional output from `make'. +case `$am_make -s -f confmf 2> /dev/null` in #( +*the\ am__doit\ target*) + am__include=include + am__quote= + _am_result=GNU + ;; +esac # Now try BSD make style include. if test "$am__include" = "#"; then echo '.include "confinc"' > confmf - if test "`$am_make -s -f confmf 2> /dev/null`" = "done"; then - am__include=.include - am__quote="\"" - _am_result=BSD - fi + case `$am_make -s -f confmf 2> /dev/null` in #( + *the\ am__doit\ target*) + am__include=.include + am__quote="\"" + _am_result=BSD + ;; + esac fi AC_SUBST([am__include]) AC_SUBST([am__quote]) @@ -591,14 +1382,14 @@ # Fake the existence of programs that GNU maintainers use. -*- Autoconf -*- -# Copyright (C) 1997, 1999, 2000, 2001, 2003, 2004, 2005 +# Copyright (C) 1997, 1999, 2000, 2001, 2003, 2004, 2005, 2008 # Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. 
-# serial 5 +# serial 6 # AM_MISSING_PROG(NAME, PROGRAM) # ------------------------------ @@ -615,7 +1406,14 @@ AC_DEFUN([AM_MISSING_HAS_RUN], [AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl AC_REQUIRE_AUX_FILE([missing])dnl -test x"${MISSING+set}" = xset || MISSING="\${SHELL} $am_aux_dir/missing" +if test x"${MISSING+set}" != xset; then + case $am_aux_dir in + *\ * | *\ *) + MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;; + *) + MISSING="\${SHELL} $am_aux_dir/missing" ;; + esac +fi # Use eval to expand $SHELL if eval "$MISSING --run true"; then am_missing_run="$MISSING --run " @@ -625,12 +1423,15 @@ fi ]) -# Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc. +# Copyright (C) 2003, 2004, 2005, 2006, 2011 Free Software Foundation, +# Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. +# serial 1 + # AM_PROG_MKDIR_P # --------------- # Check for `mkdir -p'. @@ -653,13 +1454,14 @@ # Helper functions for option handling. -*- Autoconf -*- -# Copyright (C) 2001, 2002, 2003, 2005 Free Software Foundation, Inc. +# Copyright (C) 2001, 2002, 2003, 2005, 2008, 2010 Free Software +# Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. -# serial 3 +# serial 5 # _AM_MANGLE_OPTION(NAME) # ----------------------- @@ -667,16 +1469,16 @@ [[_AM_OPTION_]m4_bpatsubst($1, [[^a-zA-Z0-9_]], [_])]) # _AM_SET_OPTION(NAME) -# ------------------------------ +# -------------------- # Set option NAME. Presently that only means defining a flag for this option. AC_DEFUN([_AM_SET_OPTION], [m4_define(_AM_MANGLE_OPTION([$1]), 1)]) # _AM_SET_OPTIONS(OPTIONS) -# ---------------------------------- +# ------------------------ # OPTIONS is a space-separated list of Automake options. 
AC_DEFUN([_AM_SET_OPTIONS], -[AC_FOREACH([_AM_Option], [$1], [_AM_SET_OPTION(_AM_Option)])]) +[m4_foreach_w([_AM_Option], [$1], [_AM_SET_OPTION(_AM_Option)])]) # _AM_IF_OPTION(OPTION, IF-SET, [IF-NOT-SET]) # ------------------------------------------- @@ -686,14 +1488,14 @@ # Check to make sure that the build environment is sane. -*- Autoconf -*- -# Copyright (C) 1996, 1997, 2000, 2001, 2003, 2005 +# Copyright (C) 1996, 1997, 2000, 2001, 2003, 2005, 2008 # Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. -# serial 4 +# serial 5 # AM_SANITY_CHECK # --------------- @@ -702,16 +1504,29 @@ # Just in case sleep 1 echo timestamp > conftest.file +# Reject unsafe characters in $srcdir or the absolute working directory +# name. Accept space and tab only in the latter. +am_lf=' +' +case `pwd` in + *[[\\\"\#\$\&\'\`$am_lf]]*) + AC_MSG_ERROR([unsafe absolute working directory name]);; +esac +case $srcdir in + *[[\\\"\#\$\&\'\`$am_lf\ \ ]]*) + AC_MSG_ERROR([unsafe srcdir value: `$srcdir']);; +esac + # Do `set' in a subshell so we don't clobber the current shell's # arguments. Must try -L first in case configure is actually a # symlink; some systems play weird games with the mod time of symlinks # (eg FreeBSD returns the mod time of the symlink's containing # directory). if ( - set X `ls -Lt $srcdir/configure conftest.file 2> /dev/null` + set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null` if test "$[*]" = "X"; then # -L didn't work. - set X `ls -t $srcdir/configure conftest.file` + set X `ls -t "$srcdir/configure" conftest.file` fi rm -f conftest.file if test "$[*]" != "X $srcdir/configure conftest.file" \ @@ -736,12 +1551,14 @@ fi AC_MSG_RESULT(yes)]) -# Copyright (C) 2001, 2003, 2005 Free Software Foundation, Inc. +# Copyright (C) 2001, 2003, 2005, 2011 Free Software Foundation, Inc. 
# # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. +# serial 1 + # AM_PROG_INSTALL_STRIP # --------------------- # One issue with vendor `install' (even GNU) is that you can't @@ -764,21 +1581,28 @@ INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s" AC_SUBST([INSTALL_STRIP_PROGRAM])]) -# Copyright (C) 2006 Free Software Foundation, Inc. +# Copyright (C) 2006, 2008, 2010 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. +# serial 3 + # _AM_SUBST_NOTMAKE(VARIABLE) # --------------------------- -# Prevent Automake from outputing VARIABLE = @VARIABLE@ in Makefile.in. +# Prevent Automake from outputting VARIABLE = @VARIABLE@ in Makefile.in. # This macro is traced by Automake. AC_DEFUN([_AM_SUBST_NOTMAKE]) +# AM_SUBST_NOTMAKE(VARIABLE) +# -------------------------- +# Public sister of _AM_SUBST_NOTMAKE. +AC_DEFUN([AM_SUBST_NOTMAKE], [_AM_SUBST_NOTMAKE($@)]) + # Check how to create a tarball. -*- Autoconf -*- -# Copyright (C) 2004, 2005 Free Software Foundation, Inc. +# Copyright (C) 2004, 2005, 2012 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, @@ -800,10 +1624,11 @@ # a tarball read from stdin. # $(am__untar) < result.tar AC_DEFUN([_AM_PROG_TAR], -[# Always define AMTAR for backward compatibility. -AM_MISSING_PROG([AMTAR], [tar]) +[# Always define AMTAR for backward compatibility. Yes, it's still used +# in the wild :-( We should find a proper way to deprecate it ... 
+AC_SUBST([AMTAR], ['$${TAR-tar}']) m4_if([$1], [v7], - [am__tar='${AMTAR} chof - "$$tardir"'; am__untar='${AMTAR} xf -'], + [am__tar='$${TAR-tar} chof - "$$tardir"' am__untar='$${TAR-tar} xf -'], [m4_case([$1], [ustar],, [pax],, [m4_fatal([Unknown tar format])]) AC_MSG_CHECKING([how to create a $1 tar archive]) @@ -872,12 +1697,9 @@ AC_SUBST([am__untar]) ]) # _AM_PROG_TAR -m4_include([m4/ac_check_curl.m4]) -m4_include([m4/ac_check_icu.m4]) m4_include([m4/libtool.m4]) m4_include([m4/ltoptions.m4]) m4_include([m4/ltsugar.m4]) m4_include([m4/ltversion.m4]) m4_include([m4/lt~obsolete.m4]) -m4_include([m4/pkg.m4]) m4_include([acinclude.m4]) diff -Nru couchdb-1.2.0/AUTHORS couchdb-1.4.0~rc.1/AUTHORS --- couchdb-1.2.0/AUTHORS 2012-03-28 15:34:12.000000000 -0400 +++ couchdb-1.4.0~rc.1/AUTHORS 2013-08-23 10:57:21.000000000 -0400 @@ -20,5 +20,11 @@ * Bob Dionne * Dave Cottlehuber * Jason Smith + * Joan Touzet + * Dale Harvey + * Dirkjan Ochtman + * Alexander Shorin + * Garren Smith + * Sue Lockwood For a list of other credits see the `THANKS` file. diff -Nru couchdb-1.2.0/bin/couch-config.tpl.in couchdb-1.4.0~rc.1/bin/couch-config.tpl.in --- couchdb-1.2.0/bin/couch-config.tpl.in 2012-03-28 15:34:12.000000000 -0400 +++ couchdb-1.4.0~rc.1/bin/couch-config.tpl.in 2013-08-23 10:57:21.000000000 -0400 @@ -53,7 +53,7 @@ --erl-libs-dir Erlang library directory --config-dir configuration directory - --db-dir database dirrectory + --db-dir database directory --view-dir view index directory --static-dir static asset directory --doc-dir documentation directory diff -Nru couchdb-1.2.0/bin/couchdb.1 couchdb-1.4.0~rc.1/bin/couchdb.1 --- couchdb-1.2.0/bin/couchdb.1 2012-03-29 17:05:53.000000000 -0400 +++ couchdb-1.4.0~rc.1/bin/couchdb.1 2013-08-23 10:57:58.000000000 -0400 @@ -1,5 +1,5 @@ -.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.40.5. -.TH COUCHDB "1" "March 2012" "couchdb - Apache CouchDB 1.2.0" "User Commands" +.\" DO NOT MODIFY THIS FILE! 
It was generated by help2man 1.40.11. +.TH COUCHDB "1" "August 2013" "couchdb - Apache CouchDB 1.4.0" "User Commands" .SH NAME couchdb \- Apache CouchDB database server .SH SYNOPSIS diff -Nru couchdb-1.2.0/bin/couchdb.bat.tpl.in couchdb-1.4.0~rc.1/bin/couchdb.bat.tpl.in --- couchdb-1.2.0/bin/couchdb.bat.tpl.in 2012-03-28 15:34:12.000000000 -0400 +++ couchdb-1.4.0~rc.1/bin/couchdb.bat.tpl.in 2013-08-23 10:57:21.000000000 -0400 @@ -23,4 +23,4 @@ if "%ERL%x" == "x" set ERL=werl.exe echo CouchDB %version% - prepare to relax... -%ERL% -sasl errlog_type error -s couch +%ERL% -sasl errlog_type error -s couch +A 4 +W w diff -Nru couchdb-1.2.0/bin/couchdb.tpl.in couchdb-1.4.0~rc.1/bin/couchdb.tpl.in --- couchdb-1.2.0/bin/couchdb.tpl.in 2012-03-28 15:34:12.000000000 -0400 +++ couchdb-1.4.0~rc.1/bin/couchdb.tpl.in 2013-08-23 10:57:21.000000000 -0400 @@ -146,6 +146,10 @@ _add_config_dir "$DEFAULT_CONFIG_DIR" _add_config_file "$LOCAL_CONFIG_FILE" _add_config_dir "$LOCAL_CONFIG_DIR" + if [ "$COUCHDB_ADDITIONAL_CONFIG_FILE" != '' ] + then + _add_config_file "$COUCHDB_ADDITIONAL_CONFIG_FILE" + fi } _reset_config () { @@ -227,8 +231,25 @@ touch $PID_FILE interactive_option="+Bd -noinput" fi + + # Find plugins and add them to the Erlang path. + if test -d "%localerlanglibdir%/../../plugins"; then + for plugin in "%localerlanglibdir%/../../plugins"/*; do + if echo "$ERL_ZFLAGS" | grep "$plugin/ebin" > /dev/null 2> /dev/null; then + : # It's already loaded. + else + if echo "$COUCH_PLUGIN_BLACKLIST" | grep "$plugin" > /dev/null 2> /dev/null; then + : # Do not use this plugin. 
+ else + ERL_ZFLAGS="$ERL_ZFLAGS -pz '$plugin/ebin'" + fi + fi + done + export ERL_ZFLAGS + fi + command="%ERL% $interactive_option $ERL_START_OPTIONS \ - -env ERL_LIBS %localerlanglibdir% -couch_ini $start_arguments -s couch" + -env ERL_LIBS $ERL_LIBS:%localerlanglibdir% -couch_ini $start_arguments -s couch" if test "$BACKGROUND" = "true" -a "$RECURSED" = "false"; then $0 $background_start_arguments -b -r $RESPAWN_TIMEOUT -p $PID_FILE \ -o $STDOUT_FILE -e $STDERR_FILE -R & @@ -267,12 +288,20 @@ stop_couchdb () { PID=`_get_pid` + STOP_TIMEOUT=60 if test -n "$PID"; then - if test "$1" = "false"; then - echo > $PID_FILE - fi if kill -0 $PID 2> /dev/null; then - if kill -1 $PID 2> /dev/null; then + if kill -TERM $PID 2> /dev/null; then + count=0 + while kill -0 $PID 2> /dev/null; do + if [ $count -ge $STOP_TIMEOUT ]; then + echo "Apache CouchDB failed to shutdown." + return $SCRIPT_ERROR + else + count=$[count+1] + sleep 1 + fi + done if test "$1" = "false"; then echo "Apache CouchDB has been shutdown." else diff -Nru couchdb-1.2.0/bin/Makefile.am couchdb-1.4.0~rc.1/bin/Makefile.am --- couchdb-1.2.0/bin/Makefile.am 2012-03-28 15:34:12.000000000 -0400 +++ couchdb-1.4.0~rc.1/bin/Makefile.am 2013-08-23 10:57:21.000000000 -0400 @@ -10,19 +10,31 @@ ## License for the specific language governing permissions and limitations under ## the License. 
+MAKE_SAFE = $(MAKE) + if WINDOWS bin_SCRIPTS = couchdb.bat else bin_SCRIPTS = couchdb couch-config endif +man1dir = $(mandir)/man1 + noinst_SCRIPTS = couchjs_dev couch-config_dev -if HELP2MAN -dist_man1_MANS = couchdb.1 +man_file = couchdb.1 + +if BUILD_MAN +man_file_build = $(man_file) +else +man_file_build = endif -CLEANFILES = $(bin_SCRIPTS) $(dist_man1_MANS) $(noinst_SCRIPTS) +BUILT_SOURCES = $(man_file_build) + +EXTRA_DIST = $(man_file_build) + +CLEANFILES = $(bin_SCRIPTS) $(man_file_build) $(noinst_SCRIPTS) transform = @program_transform_name@ couchdb_command_name = `echo couchdb | sed '$(transform)'` @@ -67,16 +79,16 @@ -e "s|%bindir%|@bindir@|g" \ -e "s|%localerlanglibdir%|@localerlanglibdir@|g" \ -e "s|%localconfdir%|@localconfdir@|g" \ - -e "s|%localdatadir%|$(localdatadir)|g" \ - -e "s|%localbuilddatadir%|$(localdatadir)|g" \ + -e "s|%localdatadir%|$(localdatadir)|g" \ + -e "s|%localbuilddatadir%|$(localdatadir)|g" \ -e "s|%localstatelogdir%|@localstatelogdir@|g" \ -e "s|%localstatelibdir%|@localstatelibdir@|g" \ -e "s|%localstatedir%|@localstatedir@|g" \ -e "s|%localstaterundir%|@localstaterundir@|g" \ - -e "s|%couchlibdir%|@localerlanglibdir@/couch-%version%|g"\ - -e "s|%couchincludedir%|@localerlanglibdir@/include|g"\ - -e "s|%couchebindir%|@localerlanglibdir@/ebin|g"\ - -e "s|%couchprivlibdir%|@localerlanglibdir@/couch-%version%/priv/lib|g"\ + -e "s|%couchlibdir%|@localerlanglibdir@/couch-%version%|g"\ + -e "s|%couchincludedir%|@localerlanglibdir@/include|g"\ + -e "s|%couchebindir%|@localerlanglibdir@/ebin|g"\ + -e "s|%couchprivlibdir%|@localerlanglibdir@/couch-%version%/priv/lib|g"\ -e "s|%bug_uri%|@bug_uri@|g" \ -e "s|%package_author_address%|@package_author_address@|g" \ -e "s|%package_author_name%|@package_author_name@|g" \ @@ -90,17 +102,17 @@ sed -e "s|%ERL%|$(ERL)|g" \ -e "s|%bindir%|$(abs_top_builddir)/bin|g" \ -e "s|%localerlanglibdir%|$(abs_top_builddir)\/src\/couchdb|g" \ - -e "s|%couchlibdir%|$(abs_top_builddir)\/src\/couchdb|g"\ - -e 
"s|%couchincludedir%|$(abs_top_builddir)\/src\/couchdb|g"\ - -e "s|%couchebindir%|$(abs_top_builddir)\/src\/couchdb|g"\ - -e "s|%couchprivlibdir%|$(abs_top_builddir)/src/couchdb/priv/.libs|g"\ - -e "s|%localdatadir%|$(abs_top_srcdir)/share|g" \ - -e "s|%localbuilddatadir%|$(abs_top_builddir)/share|g" \ + -e "s|%couchlibdir%|$(abs_top_builddir)\/src\/couchdb|g"\ + -e "s|%couchincludedir%|$(abs_top_builddir)\/src\/couchdb|g"\ + -e "s|%couchebindir%|$(abs_top_builddir)\/src\/couchdb|g"\ + -e "s|%couchprivlibdir%|$(abs_top_builddir)/src/couchdb/priv/.libs|g"\ + -e "s|%localdatadir%|$(abs_top_srcdir)/share|g" \ + -e "s|%localbuilddatadir%|$(abs_top_builddir)/share|g" \ -e "s|%localstatelibdir%|$(abs_top_builddir)/tmp/lib|g" \ -e "s|%localstatelogdir%|$(abs_top_builddir)/tmp/log|g" \ - -e "s|%localstatedir%|$(abs_top_builddir)/tmp|g" \ + -e "s|%localstatedir%|$(abs_top_builddir)/tmp|g" \ -e "s|%localstaterundir%|$(abs_top_builddir)/tmp/run|g" \ - -e "s|%bug_uri%|@bug_uri@|g" \ + -e "s|%bug_uri%|@bug_uri@|g" \ -e "s|%package_author_address%|@package_author_address@|g" \ -e "s|%package_author_name%|@package_author_name@|g" \ -e "s|%package_name%|@package_name@|g" \ @@ -109,17 +121,34 @@ $@ < $< chmod +x $@ +# Depend on source files so distributed man pages are not rebuilt for end user. 
+ +$(man_file): couchdb.tpl.in + $(MAKE_SAFE) -f Makefile couchdb; \ + $(top_srcdir)/build-aux/missing --run \ + help2man \ + --no-info \ + --help-option="-h" \ + --version-option="-V" \ + --name="$(package_name) database server" \ + ./couchdb --output $@ + +install-data-local: + if test -s $(man_file); then \ + if test `cat $(man_file) | wc -l` -gt 1; then \ + $(INSTALL) -d $(DESTDIR)$(man1dir); \ + $(INSTALL_DATA) $(man_file) $(DESTDIR)$(man1dir)/$(man_file); \ + fi \ + fi -HELP2MAN_OPTION=--no-info --help-option="-h" --version-option="-V" +uninstall-local: + rm -f $(DESTDIR)$(man1dir)/$(man_file) -# XXX: Because the scripts are made at build time for the user we need to -# XXX: depend on the original templates so as not to cause the rebuilding of -# XXX: the man pages. - -couchdb.1: couchdb.tpl.in - touch $@ - if test -x "$(HELP2MAN_EXECUTABLE)"; then \ - $(MAKE) -f Makefile couchdb; \ - $(HELP2MAN_EXECUTABLE) $(HELP2MAN_OPTION) \ - --name="Apache CouchDB database server" ./couchdb --output $@; \ +distcheck-hook: + if test ! -s $(man_file); then \ + $(top_srcdir)/build-aux/dist-error $(man_file); \ + else \ + if test ! `cat $(man_file) | wc -l` -gt 1; then \ + $(top_srcdir)/build-aux/dist-error $(man_file); \ + fi \ fi diff -Nru couchdb-1.2.0/bin/Makefile.in couchdb-1.4.0~rc.1/bin/Makefile.in --- couchdb-1.2.0/bin/Makefile.in 2012-03-29 17:05:38.000000000 -0400 +++ couchdb-1.4.0~rc.1/bin/Makefile.in 2013-08-23 10:57:40.000000000 -0400 @@ -1,8 +1,9 @@ -# Makefile.in generated by automake 1.10 from Makefile.am. +# Makefile.in generated by automake 1.11.6 from Makefile.am. # @configure_input@ # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, -# 2003, 2004, 2005, 2006 Free Software Foundation, Inc. +# 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 Free Software +# Foundation, Inc. 
# This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. @@ -15,9 +16,27 @@ @SET_MAKE@ VPATH = @srcdir@ +am__make_dryrun = \ + { \ + am__dry=no; \ + case $$MAKEFLAGS in \ + *\\[\ \ ]*) \ + echo 'am--echo: ; @echo "AM" OK' | $(MAKE) -f - 2>/dev/null \ + | grep '^AM OK$$' >/dev/null || am__dry=yes;; \ + *) \ + for am__flg in $$MAKEFLAGS; do \ + case $$am__flg in \ + *=*|--*) ;; \ + *n*) am__dry=yes; break;; \ + esac; \ + done;; \ + esac; \ + test $$am__dry = yes; \ + } pkgdatadir = $(datadir)/@PACKAGE@ -pkglibdir = $(libdir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ +pkglibdir = $(libdir)/@PACKAGE@ +pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c @@ -32,30 +51,57 @@ build_triplet = @build@ host_triplet = @host@ subdir = bin -DIST_COMMON = $(dist_man1_MANS) $(srcdir)/Makefile.am \ - $(srcdir)/Makefile.in $(srcdir)/couch-config.tpl.in \ - $(srcdir)/couchdb.bat.tpl.in $(srcdir)/couchdb.tpl.in +DIST_COMMON = $(srcdir)/Makefile.am $(srcdir)/Makefile.in \ + $(srcdir)/couch-config.tpl.in $(srcdir)/couchdb.bat.tpl.in \ + $(srcdir)/couchdb.tpl.in ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 -am__aclocal_m4_deps = $(top_srcdir)/m4/ac_check_curl.m4 \ - $(top_srcdir)/m4/ac_check_icu.m4 $(top_srcdir)/m4/libtool.m4 \ +am__aclocal_m4_deps = $(top_srcdir)/m4/libtool.m4 \ $(top_srcdir)/m4/ltoptions.m4 $(top_srcdir)/m4/ltsugar.m4 \ $(top_srcdir)/m4/ltversion.m4 $(top_srcdir)/m4/lt~obsolete.m4 \ - $(top_srcdir)/m4/pkg.m4 $(top_srcdir)/acinclude.m4 \ - $(top_srcdir)/configure.ac + $(top_srcdir)/acinclude.m4 $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h \ 
$(top_builddir)/src/snappy/google-snappy/config.h CONFIG_CLEAN_FILES = couch-config.tpl couchdb.tpl couchdb.bat.tpl -am__installdirs = "$(DESTDIR)$(bindir)" "$(DESTDIR)$(man1dir)" -binSCRIPT_INSTALL = $(INSTALL_SCRIPT) +CONFIG_CLEAN_VPATH_FILES = +am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; +am__vpath_adj = case $$p in \ + $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ + *) f=$$p;; \ + esac; +am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; +am__install_max = 40 +am__nobase_strip_setup = \ + srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` +am__nobase_strip = \ + for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" +am__nobase_list = $(am__nobase_strip_setup); \ + for p in $$list; do echo "$$p $$p"; done | \ + sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ + $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ + if (++n[$$2] == $(am__install_max)) \ + { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ + END { for (dir in files) print dir, files[dir] }' +am__base_list = \ + sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ + sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' +am__uninstall_files_from_dir = { \ + test -z "$$files" \ + || { test ! -d "$$dir" && test ! -f "$$dir" && test ! 
-r "$$dir"; } \ + || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ + $(am__cd) "$$dir" && rm -f $$files; }; \ + } +am__installdirs = "$(DESTDIR)$(bindir)" SCRIPTS = $(bin_SCRIPTS) $(noinst_SCRIPTS) SOURCES = DIST_SOURCES = -man1dir = $(mandir)/man1 -NROFF = nroff -MANS = $(dist_man1_MANS) +am__can_run_installinfo = \ + case $$AM_UPDATE_INFO_DIR in \ + n|no|NO) false;; \ + *) (install-info --version) >/dev/null 2>&1;; \ + esac DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) transform = @program_transform_name@ ACLOCAL = @ACLOCAL@ @@ -96,7 +142,11 @@ FGREP = @FGREP@ FLAGS = @FLAGS@ GREP = @GREP@ -HELP2MAN_EXECUTABLE = @HELP2MAN_EXECUTABLE@ +HAS_HELP2MAN = @HAS_HELP2MAN@ +HAS_INSTALLINFO = @HAS_INSTALLINFO@ +HAS_MAKEINFO = @HAS_MAKEINFO@ +HAS_PDFLATEX = @HAS_PDFLATEX@ +HAS_SPHINX_BUILD = @HAS_SPHINX_BUILD@ ICU_BIN = @ICU_BIN@ ICU_CFLAGS = @ICU_CFLAGS@ ICU_CONFIG = @ICU_CONFIG@ @@ -109,10 +159,7 @@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ -JS185_CFLAGS = @JS185_CFLAGS@ -JS185_LIBS = @JS185_LIBS@ JS_CFLAGS = @JS_CFLAGS@ -JS_LDFLAGS = @JS_LDFLAGS@ JS_LIBS = @JS_LIBS@ JS_LIB_BINARY = @JS_LIB_BINARY@ LD = @LD@ @@ -124,6 +171,7 @@ LN_S = @LN_S@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ +MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ NM = @NM@ NMEDIT = @NMEDIT@ @@ -136,6 +184,7 @@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ +PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PKG_CONFIG = @PKG_CONFIG@ @@ -154,6 +203,7 @@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ +ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ @@ -222,6 +272,7 @@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ +top_build_prefix = @top_build_prefix@ top_builddir = 
@top_builddir@ top_srcdir = @top_srcdir@ version = @version@ @@ -230,29 +281,35 @@ version_release = @version_release@ version_revision = @version_revision@ version_stage = @version_stage@ +MAKE_SAFE = $(MAKE) @WINDOWS_FALSE@bin_SCRIPTS = couchdb couch-config @WINDOWS_TRUE@bin_SCRIPTS = couchdb.bat +man1dir = $(mandir)/man1 noinst_SCRIPTS = couchjs_dev couch-config_dev -@HELP2MAN_TRUE@dist_man1_MANS = couchdb.1 -CLEANFILES = $(bin_SCRIPTS) $(dist_man1_MANS) $(noinst_SCRIPTS) +man_file = couchdb.1 +@BUILD_MAN_FALSE@man_file_build = +@BUILD_MAN_TRUE@man_file_build = $(man_file) +BUILT_SOURCES = $(man_file_build) +EXTRA_DIST = $(man_file_build) +CLEANFILES = $(bin_SCRIPTS) $(man_file_build) $(noinst_SCRIPTS) couchdb_command_name = `echo couchdb | sed '$(transform)'` couchjs_command_name = `echo couchjs | sed '$(transform)'` -HELP2MAN_OPTION = --no-info --help-option="-h" --version-option="-V" -all: all-am +all: $(BUILT_SOURCES) + $(MAKE) $(AM_MAKEFLAGS) all-am .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ - cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh \ - && exit 0; \ + ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ + && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ - echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign bin/Makefile'; \ - cd $(top_srcdir) && \ - $(AUTOMAKE) --foreign bin/Makefile + echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign bin/Makefile'; \ + $(am__cd) $(top_srcdir) && \ + $(AUTOMAKE) --foreign bin/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ @@ -270,6 +327,7 @@ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh +$(am__aclocal_m4_deps): couch-config.tpl: $(top_builddir)/config.status $(srcdir)/couch-config.tpl.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ couchdb.tpl: $(top_builddir)/config.status $(srcdir)/couchdb.tpl.in @@ -278,74 +336,45 @@ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ install-binSCRIPTS: $(bin_SCRIPTS) @$(NORMAL_INSTALL) - test -z "$(bindir)" || $(MKDIR_P) "$(DESTDIR)$(bindir)" - @list='$(bin_SCRIPTS)'; for p in $$list; do \ + @list='$(bin_SCRIPTS)'; test -n "$(bindir)" || list=; \ + if test -n "$$list"; then \ + echo " $(MKDIR_P) '$(DESTDIR)$(bindir)'"; \ + $(MKDIR_P) "$(DESTDIR)$(bindir)" || exit 1; \ + fi; \ + for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ - if test -f $$d$$p; then \ - f=`echo "$$p" | sed 's|^.*/||;$(transform)'`; \ - echo " $(binSCRIPT_INSTALL) '$$d$$p' '$(DESTDIR)$(bindir)/$$f'"; \ - $(binSCRIPT_INSTALL) "$$d$$p" "$(DESTDIR)$(bindir)/$$f"; \ - else :; fi; \ - done + if test -f "$$d$$p"; then echo "$$d$$p"; echo "$$p"; else :; fi; \ + done | \ + sed -e 'p;s,.*/,,;n' \ + -e 'h;s|.*|.|' \ + -e 'p;x;s,.*/,,;$(transform)' | sed 'N;N;N;s,\n, ,g' | \ + $(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1; } \ + { d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \ + if ($$2 == $$4) { files[d] = files[d] " " $$1; \ + if (++n[d] == $(am__install_max)) { \ + print "f", d, files[d]; n[d] = 0; files[d] = "" } } \ + else { print "f", d "/" $$4, $$1 } } \ + END { for (d in files) print "f", d, files[d] }' | \ + while read type dir files; do \ + if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \ + test -z "$$files" || { \ + echo " $(INSTALL_SCRIPT) $$files '$(DESTDIR)$(bindir)$$dir'"; \ + $(INSTALL_SCRIPT) $$files "$(DESTDIR)$(bindir)$$dir" || exit $$?; \ + } \ + ; done uninstall-binSCRIPTS: 
@$(NORMAL_UNINSTALL) - @list='$(bin_SCRIPTS)'; for p in $$list; do \ - f=`echo "$$p" | sed 's|^.*/||;$(transform)'`; \ - echo " rm -f '$(DESTDIR)$(bindir)/$$f'"; \ - rm -f "$(DESTDIR)$(bindir)/$$f"; \ - done + @list='$(bin_SCRIPTS)'; test -n "$(bindir)" || exit 0; \ + files=`for p in $$list; do echo "$$p"; done | \ + sed -e 's,.*/,,;$(transform)'`; \ + dir='$(DESTDIR)$(bindir)'; $(am__uninstall_files_from_dir) mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs -install-man1: $(man1_MANS) $(man_MANS) - @$(NORMAL_INSTALL) - test -z "$(man1dir)" || $(MKDIR_P) "$(DESTDIR)$(man1dir)" - @list='$(man1_MANS) $(dist_man1_MANS) $(nodist_man1_MANS)'; \ - l2='$(man_MANS) $(dist_man_MANS) $(nodist_man_MANS)'; \ - for i in $$l2; do \ - case "$$i" in \ - *.1*) list="$$list $$i" ;; \ - esac; \ - done; \ - for i in $$list; do \ - if test -f $(srcdir)/$$i; then file=$(srcdir)/$$i; \ - else file=$$i; fi; \ - ext=`echo $$i | sed -e 's/^.*\\.//'`; \ - case "$$ext" in \ - 1*) ;; \ - *) ext='1' ;; \ - esac; \ - inst=`echo $$i | sed -e 's/\\.[0-9a-z]*$$//'`; \ - inst=`echo $$inst | sed -e 's/^.*\///'`; \ - inst=`echo $$inst | sed '$(transform)'`.$$ext; \ - echo " $(INSTALL_DATA) '$$file' '$(DESTDIR)$(man1dir)/$$inst'"; \ - $(INSTALL_DATA) "$$file" "$(DESTDIR)$(man1dir)/$$inst"; \ - done -uninstall-man1: - @$(NORMAL_UNINSTALL) - @list='$(man1_MANS) $(dist_man1_MANS) $(nodist_man1_MANS)'; \ - l2='$(man_MANS) $(dist_man_MANS) $(nodist_man_MANS)'; \ - for i in $$l2; do \ - case "$$i" in \ - *.1*) list="$$list $$i" ;; \ - esac; \ - done; \ - for i in $$list; do \ - ext=`echo $$i | sed -e 's/^.*\\.//'`; \ - case "$$ext" in \ - 1*) ;; \ - *) ext='1' ;; \ - esac; \ - inst=`echo $$i | sed -e 's/\\.[0-9a-z]*$$//'`; \ - inst=`echo $$inst | sed -e 's/^.*\///'`; \ - inst=`echo $$inst | sed '$(transform)'`.$$ext; \ - echo " rm -f '$(DESTDIR)$(man1dir)/$$inst'"; \ - rm -f "$(DESTDIR)$(man1dir)/$$inst"; \ - done tags: TAGS TAGS: @@ -369,24 +398,30 @@ if test -f $$file || test -d $$file; 
then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ + if test -d "$(distdir)/$$file"; then \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ + fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ - cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \ + cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ - cp -pR $$d/$$file $(distdir)$$dir || exit 1; \ + cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ - test -f $(distdir)/$$file \ - || cp -p $$d/$$file $(distdir)/$$file \ + test -f "$(distdir)/$$file" \ + || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am -check: check-am -all-am: Makefile $(SCRIPTS) $(MANS) +check: $(BUILT_SOURCES) + $(MAKE) $(AM_MAKEFLAGS) check-am +all-am: Makefile $(SCRIPTS) installdirs: - for dir in "$(DESTDIR)$(bindir)" "$(DESTDIR)$(man1dir)"; do \ + for dir in "$(DESTDIR)$(bindir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done -install: install-am +install: $(BUILT_SOURCES) + $(MAKE) $(AM_MAKEFLAGS) install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am @@ -396,10 +431,15 @@ installcheck: installcheck-am install-strip: - $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ - install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ - `test -z '$(STRIP)' || \ - echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install + if test -z '$(STRIP)'; then \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + install; \ + else \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ + fi mostlyclean-generic: clean-generic: @@ 
-407,10 +447,12 @@ distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) + -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." + -test -z "$(BUILT_SOURCES)" || rm -f $(BUILT_SOURCES) @WINDOWS_TRUE@install-exec-hook: clean: clean-am @@ -426,28 +468,39 @@ html: html-am +html-am: + info: info-am info-am: -install-data-am: install-man +install-data-am: install-data-local install-dvi: install-dvi-am +install-dvi-am: + install-exec-am: install-binSCRIPTS @$(NORMAL_INSTALL) $(MAKE) $(AM_MAKEFLAGS) install-exec-hook - install-html: install-html-am +install-html-am: + install-info: install-info-am -install-man: install-man1 +install-info-am: + +install-man: install-pdf: install-pdf-am +install-pdf-am: + install-ps: install-ps-am +install-ps-am: + installcheck-am: maintainer-clean: maintainer-clean-am @@ -466,24 +519,22 @@ ps-am: -uninstall-am: uninstall-binSCRIPTS uninstall-man - -uninstall-man: uninstall-man1 +uninstall-am: uninstall-binSCRIPTS uninstall-local -.MAKE: install-am install-exec-am install-strip +.MAKE: all check install install-am install-exec-am install-strip .PHONY: all all-am check check-am clean clean-generic clean-libtool \ distclean distclean-generic distclean-libtool distdir dvi \ dvi-am html html-am info info-am install install-am \ - install-binSCRIPTS install-data install-data-am install-dvi \ - install-dvi-am install-exec install-exec-am install-exec-hook \ - install-html install-html-am install-info install-info-am \ - install-man install-man1 install-pdf install-pdf-am install-ps \ - install-ps-am install-strip installcheck installcheck-am \ - installdirs maintainer-clean maintainer-clean-generic \ - mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \ - ps ps-am uninstall uninstall-am uninstall-binSCRIPTS \ - 
uninstall-man uninstall-man1 + install-binSCRIPTS install-data install-data-am \ + install-data-local install-dvi install-dvi-am install-exec \ + install-exec-am install-exec-hook install-html install-html-am \ + install-info install-info-am install-man install-pdf \ + install-pdf-am install-ps install-ps-am install-strip \ + installcheck installcheck-am installdirs maintainer-clean \ + maintainer-clean-generic mostlyclean mostlyclean-generic \ + mostlyclean-libtool pdf pdf-am ps ps-am uninstall uninstall-am \ + uninstall-binSCRIPTS uninstall-local couchdb: couchdb.tpl @@ -523,16 +574,16 @@ -e "s|%bindir%|@bindir@|g" \ -e "s|%localerlanglibdir%|@localerlanglibdir@|g" \ -e "s|%localconfdir%|@localconfdir@|g" \ - -e "s|%localdatadir%|$(localdatadir)|g" \ - -e "s|%localbuilddatadir%|$(localdatadir)|g" \ + -e "s|%localdatadir%|$(localdatadir)|g" \ + -e "s|%localbuilddatadir%|$(localdatadir)|g" \ -e "s|%localstatelogdir%|@localstatelogdir@|g" \ -e "s|%localstatelibdir%|@localstatelibdir@|g" \ -e "s|%localstatedir%|@localstatedir@|g" \ -e "s|%localstaterundir%|@localstaterundir@|g" \ - -e "s|%couchlibdir%|@localerlanglibdir@/couch-%version%|g"\ - -e "s|%couchincludedir%|@localerlanglibdir@/include|g"\ - -e "s|%couchebindir%|@localerlanglibdir@/ebin|g"\ - -e "s|%couchprivlibdir%|@localerlanglibdir@/couch-%version%/priv/lib|g"\ + -e "s|%couchlibdir%|@localerlanglibdir@/couch-%version%|g"\ + -e "s|%couchincludedir%|@localerlanglibdir@/include|g"\ + -e "s|%couchebindir%|@localerlanglibdir@/ebin|g"\ + -e "s|%couchprivlibdir%|@localerlanglibdir@/couch-%version%/priv/lib|g"\ -e "s|%bug_uri%|@bug_uri@|g" \ -e "s|%package_author_address%|@package_author_address@|g" \ -e "s|%package_author_name%|@package_author_name@|g" \ @@ -546,17 +597,17 @@ sed -e "s|%ERL%|$(ERL)|g" \ -e "s|%bindir%|$(abs_top_builddir)/bin|g" \ -e "s|%localerlanglibdir%|$(abs_top_builddir)\/src\/couchdb|g" \ - -e "s|%couchlibdir%|$(abs_top_builddir)\/src\/couchdb|g"\ - -e 
"s|%couchincludedir%|$(abs_top_builddir)\/src\/couchdb|g"\ - -e "s|%couchebindir%|$(abs_top_builddir)\/src\/couchdb|g"\ - -e "s|%couchprivlibdir%|$(abs_top_builddir)/src/couchdb/priv/.libs|g"\ - -e "s|%localdatadir%|$(abs_top_srcdir)/share|g" \ - -e "s|%localbuilddatadir%|$(abs_top_builddir)/share|g" \ + -e "s|%couchlibdir%|$(abs_top_builddir)\/src\/couchdb|g"\ + -e "s|%couchincludedir%|$(abs_top_builddir)\/src\/couchdb|g"\ + -e "s|%couchebindir%|$(abs_top_builddir)\/src\/couchdb|g"\ + -e "s|%couchprivlibdir%|$(abs_top_builddir)/src/couchdb/priv/.libs|g"\ + -e "s|%localdatadir%|$(abs_top_srcdir)/share|g" \ + -e "s|%localbuilddatadir%|$(abs_top_builddir)/share|g" \ -e "s|%localstatelibdir%|$(abs_top_builddir)/tmp/lib|g" \ -e "s|%localstatelogdir%|$(abs_top_builddir)/tmp/log|g" \ - -e "s|%localstatedir%|$(abs_top_builddir)/tmp|g" \ + -e "s|%localstatedir%|$(abs_top_builddir)/tmp|g" \ -e "s|%localstaterundir%|$(abs_top_builddir)/tmp/run|g" \ - -e "s|%bug_uri%|@bug_uri@|g" \ + -e "s|%bug_uri%|@bug_uri@|g" \ -e "s|%package_author_address%|@package_author_address@|g" \ -e "s|%package_author_name%|@package_author_name@|g" \ -e "s|%package_name%|@package_name@|g" \ @@ -565,17 +616,38 @@ $@ < $< chmod +x $@ -# XXX: Because the scripts are made at build time for the user we need to -# XXX: depend on the original templates so as not to cause the rebuilding of -# XXX: the man pages. - -couchdb.1: couchdb.tpl.in - touch $@ - if test -x "$(HELP2MAN_EXECUTABLE)"; then \ - $(MAKE) -f Makefile couchdb; \ - $(HELP2MAN_EXECUTABLE) $(HELP2MAN_OPTION) \ - --name="Apache CouchDB database server" ./couchdb --output $@; \ +# Depend on source files so distributed man pages are not rebuilt for end user. 
+ +$(man_file): couchdb.tpl.in + $(MAKE_SAFE) -f Makefile couchdb; \ + $(top_srcdir)/build-aux/missing --run \ + help2man \ + --no-info \ + --help-option="-h" \ + --version-option="-V" \ + --name="$(package_name) database server" \ + ./couchdb --output $@ + +install-data-local: + if test -s $(man_file); then \ + if test `cat $(man_file) | wc -l` -gt 1; then \ + $(INSTALL) -d $(DESTDIR)$(man1dir); \ + $(INSTALL_DATA) $(man_file) $(DESTDIR)$(man1dir)/$(man_file); \ + fi \ fi + +uninstall-local: + rm -f $(DESTDIR)$(man1dir)/$(man_file) + +distcheck-hook: + if test ! -s $(man_file); then \ + $(top_srcdir)/build-aux/dist-error $(man_file); \ + else \ + if test ! `cat $(man_file) | wc -l` -gt 1; then \ + $(top_srcdir)/build-aux/dist-error $(man_file); \ + fi \ + fi + # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: diff -Nru couchdb-1.2.0/BUGS couchdb-1.4.0~rc.1/BUGS --- couchdb-1.2.0/BUGS 2012-03-28 15:34:12.000000000 -0400 +++ couchdb-1.4.0~rc.1/BUGS 2013-08-23 10:57:21.000000000 -0400 @@ -1,6 +1,8 @@ Apache CouchDB BUGS =================== -Please see the [documentation][1] on how to report bugs with Apache CouchDB. +Visit our issue tracker: -[1] http://couchdb.apache.org/community/issues.html + https://issues.apache.org/jira/browse/CouchDB + +You can use this to report bugs, request features, or suggest enhancements. diff -Nru couchdb-1.2.0/build-aux/config.guess couchdb-1.4.0~rc.1/build-aux/config.guess --- couchdb-1.2.0/build-aux/config.guess 2011-11-03 17:56:27.000000000 -0400 +++ couchdb-1.4.0~rc.1/build-aux/config.guess 2013-05-20 03:33:19.000000000 -0400 @@ -1,14 +1,14 @@ #! /bin/sh # Attempt to guess a canonical system name. # Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, -# 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, -# Inc. 
+# 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +# 2011, 2012, 2013 Free Software Foundation, Inc. -timestamp='2006-07-02' +timestamp='2012-12-30' # This file is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or +# the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, but @@ -17,26 +17,22 @@ # General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street - Fifth Floor, Boston, MA -# 02110-1301, USA. +# along with this program; if not, see . # # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under -# the same distribution terms that you use for the rest of that program. - - -# Originally written by Per Bothner . -# Please send patches to . Submit a context -# diff and a properly formatted ChangeLog entry. +# the same distribution terms that you use for the rest of that +# program. This Exception is an additional permission under section 7 +# of the GNU General Public License, version 3 ("GPLv3"). +# +# Originally written by Per Bothner. # -# This script attempts to guess a canonical system name similar to -# config.sub. If it succeeds, it prints the system name on stdout, and -# exits with 0. Otherwise, it exits with 1. +# You can get the latest version of this script from: +# http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD # -# The plan is that this can be called by configure scripts if you -# don't specify an explicit build system type. 
+# Please send patches with a ChangeLog entry to config-patches@gnu.org. + me=`echo "$0" | sed -e 's,.*/,,'` @@ -56,8 +52,9 @@ GNU config.guess ($timestamp) Originally written by Per Bothner. -Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 -Free Software Foundation, Inc. +Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, +2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, +2012, 2013 Free Software Foundation, Inc. This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." @@ -139,12 +136,35 @@ UNAME_SYSTEM=`(uname -s) 2>/dev/null` || UNAME_SYSTEM=unknown UNAME_VERSION=`(uname -v) 2>/dev/null` || UNAME_VERSION=unknown +case "${UNAME_SYSTEM}" in +Linux|GNU/*) + eval $set_cc_for_build + cat <<-EOF > $dummy.c + #include + #ifdef __UCLIBC__ + # ifdef __UCLIBC_CONFIG_VERSION__ + LIBC=uclibc __UCLIBC_CONFIG_VERSION__ + # else + LIBC=uclibc + # endif + #else + # ifdef __dietlibc__ + LIBC=dietlibc + # else + LIBC=gnu + # endif + #endif + EOF + eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^LIBC'` + ;; +esac + # Note: order is significant - the case branches are not exclusive. case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in *:NetBSD:*:*) # NetBSD (nbsd) targets should (where applicable) match one or - # more of the tupples: *-*-netbsdelf*, *-*-netbsdaout*, + # more of the tuples: *-*-netbsdelf*, *-*-netbsdaout*, # *-*-netbsdecoff* and *-*-netbsd*. For targets that recently # switched to ELF, *-*-netbsd* would select the old # object file format. 
This provides both forward @@ -161,6 +181,7 @@ arm*) machine=arm-unknown ;; sh3el) machine=shl-unknown ;; sh3eb) machine=sh-unknown ;; + sh5el) machine=sh5le-unknown ;; *) machine=${UNAME_MACHINE_ARCH}-unknown ;; esac # The Operating System including object format, if it has switched @@ -169,7 +190,7 @@ arm*|i386|m68k|ns32k|sh3*|sparc|vax) eval $set_cc_for_build if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \ - | grep __ELF__ >/dev/null + | grep -q __ELF__ then # Once all utilities can be ECOFF (netbsdecoff) or a.out (netbsdaout). # Return netbsd for either. FIX? @@ -179,7 +200,7 @@ fi ;; *) - os=netbsd + os=netbsd ;; esac # The OS release @@ -200,6 +221,10 @@ # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used. echo "${machine}-${os}${release}" exit ;; + *:Bitrig:*:*) + UNAME_MACHINE_ARCH=`arch | sed 's/Bitrig.//'` + echo ${UNAME_MACHINE_ARCH}-unknown-bitrig${UNAME_RELEASE} + exit ;; *:OpenBSD:*:*) UNAME_MACHINE_ARCH=`arch | sed 's/OpenBSD.//'` echo ${UNAME_MACHINE_ARCH}-unknown-openbsd${UNAME_RELEASE} @@ -222,7 +247,7 @@ UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $3}'` ;; *5.*) - UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $4}'` + UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $4}'` ;; esac # According to Compaq, /usr/sbin/psrinfo has been available on @@ -268,7 +293,10 @@ # A Xn.n version is an unreleased experimental baselevel. # 1.2 uses "1.2" for uname -r. echo ${UNAME_MACHINE}-dec-osf`echo ${UNAME_RELEASE} | sed -e 's/^[PVTX]//' | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'` - exit ;; + # Reset EXIT trap before exiting to avoid spurious non-zero exit code. + exitcode=$? + trap '' 0 + exit $exitcode ;; Alpha\ *:Windows_NT*:*) # How do we know it's Interix rather than the generic POSIX subsystem? 
# Should we change UNAME_MACHINE based on the output of uname instead @@ -294,12 +322,12 @@ echo s390-ibm-zvmoe exit ;; *:OS400:*:*) - echo powerpc-ibm-os400 + echo powerpc-ibm-os400 exit ;; arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*) echo arm-acorn-riscix${UNAME_RELEASE} exit ;; - arm:riscos:*:*|arm:RISCOS:*:*) + arm*:riscos:*:*|arm*:RISCOS:*:*) echo arm-unknown-riscos exit ;; SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*) @@ -323,14 +351,33 @@ case `/usr/bin/uname -p` in sparc) echo sparc-icl-nx7; exit ;; esac ;; + s390x:SunOS:*:*) + echo ${UNAME_MACHINE}-ibm-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` + exit ;; sun4H:SunOS:5.*:*) echo sparc-hal-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit ;; sun4*:SunOS:5.*:* | tadpole*:SunOS:5.*:*) echo sparc-sun-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit ;; - i86pc:SunOS:5.*:*) - echo i386-pc-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` + i86pc:AuroraUX:5.*:* | i86xen:AuroraUX:5.*:*) + echo i386-pc-auroraux${UNAME_RELEASE} + exit ;; + i86pc:SunOS:5.*:* | i86xen:SunOS:5.*:*) + eval $set_cc_for_build + SUN_ARCH="i386" + # If there is a compiler, see if it is configured for 64-bit objects. + # Note that the Sun cc does not turn __LP64__ into 1 like gcc does. + # This test works for both compilers. + if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then + if (echo '#ifdef __amd64'; echo IS_64BIT_ARCH; echo '#endif') | \ + (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \ + grep IS_64BIT_ARCH >/dev/null + then + SUN_ARCH="x86_64" + fi + fi + echo ${SUN_ARCH}-pc-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit ;; sun4*:SunOS:6*:*) # According to config.sub, this is the proper way to canonicalize @@ -374,23 +421,23 @@ # MiNT. But MiNT is downward compatible to TOS, so this should # be no problem. 
atarist[e]:*MiNT:*:* | atarist[e]:*mint:*:* | atarist[e]:*TOS:*:*) - echo m68k-atari-mint${UNAME_RELEASE} + echo m68k-atari-mint${UNAME_RELEASE} exit ;; atari*:*MiNT:*:* | atari*:*mint:*:* | atarist[e]:*TOS:*:*) echo m68k-atari-mint${UNAME_RELEASE} - exit ;; + exit ;; *falcon*:*MiNT:*:* | *falcon*:*mint:*:* | *falcon*:*TOS:*:*) - echo m68k-atari-mint${UNAME_RELEASE} + echo m68k-atari-mint${UNAME_RELEASE} exit ;; milan*:*MiNT:*:* | milan*:*mint:*:* | *milan*:*TOS:*:*) - echo m68k-milan-mint${UNAME_RELEASE} - exit ;; + echo m68k-milan-mint${UNAME_RELEASE} + exit ;; hades*:*MiNT:*:* | hades*:*mint:*:* | *hades*:*TOS:*:*) - echo m68k-hades-mint${UNAME_RELEASE} - exit ;; + echo m68k-hades-mint${UNAME_RELEASE} + exit ;; *:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*) - echo m68k-unknown-mint${UNAME_RELEASE} - exit ;; + echo m68k-unknown-mint${UNAME_RELEASE} + exit ;; m68k:machten:*:*) echo m68k-apple-machten${UNAME_RELEASE} exit ;; @@ -460,8 +507,8 @@ echo m88k-motorola-sysv3 exit ;; AViiON:dgux:*:*) - # DG/UX returns AViiON for all architectures - UNAME_PROCESSOR=`/usr/bin/uname -p` + # DG/UX returns AViiON for all architectures + UNAME_PROCESSOR=`/usr/bin/uname -p` if [ $UNAME_PROCESSOR = mc88100 ] || [ $UNAME_PROCESSOR = mc88110 ] then if [ ${TARGET_BINARY_INTERFACE}x = m88kdguxelfx ] || \ @@ -474,7 +521,7 @@ else echo i586-dg-dgux${UNAME_RELEASE} fi - exit ;; + exit ;; M88*:DolphinOS:*:*) # DolphinOS (SVR3) echo m88k-dolphin-sysv3 exit ;; @@ -531,7 +578,7 @@ echo rs6000-ibm-aix3.2 fi exit ;; - *:AIX:*:[45]) + *:AIX:*:[4567]) IBM_CPU_ID=`/usr/sbin/lsdev -C -c processor -S available | sed 1q | awk '{ print $1 }'` if /usr/sbin/lsattr -El ${IBM_CPU_ID} | grep ' POWER' >/dev/null 2>&1; then IBM_ARCH=rs6000 @@ -574,52 +621,52 @@ 9000/[678][0-9][0-9]) if [ -x /usr/bin/getconf ]; then sc_cpu_version=`/usr/bin/getconf SC_CPU_VERSION 2>/dev/null` - sc_kernel_bits=`/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null` - case "${sc_cpu_version}" in - 523) HP_ARCH="hppa1.0" ;; # CPU_PA_RISC1_0 - 
528) HP_ARCH="hppa1.1" ;; # CPU_PA_RISC1_1 - 532) # CPU_PA_RISC2_0 - case "${sc_kernel_bits}" in - 32) HP_ARCH="hppa2.0n" ;; - 64) HP_ARCH="hppa2.0w" ;; + sc_kernel_bits=`/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null` + case "${sc_cpu_version}" in + 523) HP_ARCH="hppa1.0" ;; # CPU_PA_RISC1_0 + 528) HP_ARCH="hppa1.1" ;; # CPU_PA_RISC1_1 + 532) # CPU_PA_RISC2_0 + case "${sc_kernel_bits}" in + 32) HP_ARCH="hppa2.0n" ;; + 64) HP_ARCH="hppa2.0w" ;; '') HP_ARCH="hppa2.0" ;; # HP-UX 10.20 - esac ;; - esac + esac ;; + esac fi if [ "${HP_ARCH}" = "" ]; then eval $set_cc_for_build - sed 's/^ //' << EOF >$dummy.c + sed 's/^ //' << EOF >$dummy.c - #define _HPUX_SOURCE - #include - #include - - int main () - { - #if defined(_SC_KERNEL_BITS) - long bits = sysconf(_SC_KERNEL_BITS); - #endif - long cpu = sysconf (_SC_CPU_VERSION); - - switch (cpu) - { - case CPU_PA_RISC1_0: puts ("hppa1.0"); break; - case CPU_PA_RISC1_1: puts ("hppa1.1"); break; - case CPU_PA_RISC2_0: - #if defined(_SC_KERNEL_BITS) - switch (bits) - { - case 64: puts ("hppa2.0w"); break; - case 32: puts ("hppa2.0n"); break; - default: puts ("hppa2.0"); break; - } break; - #else /* !defined(_SC_KERNEL_BITS) */ - puts ("hppa2.0"); break; - #endif - default: puts ("hppa1.0"); break; - } - exit (0); - } + #define _HPUX_SOURCE + #include + #include + + int main () + { + #if defined(_SC_KERNEL_BITS) + long bits = sysconf(_SC_KERNEL_BITS); + #endif + long cpu = sysconf (_SC_CPU_VERSION); + + switch (cpu) + { + case CPU_PA_RISC1_0: puts ("hppa1.0"); break; + case CPU_PA_RISC1_1: puts ("hppa1.1"); break; + case CPU_PA_RISC2_0: + #if defined(_SC_KERNEL_BITS) + switch (bits) + { + case 64: puts ("hppa2.0w"); break; + case 32: puts ("hppa2.0n"); break; + default: puts ("hppa2.0"); break; + } break; + #else /* !defined(_SC_KERNEL_BITS) */ + puts ("hppa2.0"); break; + #endif + default: puts ("hppa1.0"); break; + } + exit (0); + } EOF (CCOPTS= $CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null) && HP_ARCH=`$dummy` test -z "$HP_ARCH" && 
HP_ARCH=hppa @@ -639,7 +686,7 @@ # => hppa64-hp-hpux11.23 if echo __LP64__ | (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | - grep __LP64__ >/dev/null + grep -q __LP64__ then HP_ARCH="hppa2.0w" else @@ -710,22 +757,22 @@ exit ;; C1*:ConvexOS:*:* | convex:ConvexOS:C1*:*) echo c1-convex-bsd - exit ;; + exit ;; C2*:ConvexOS:*:* | convex:ConvexOS:C2*:*) if getsysinfo -f scalar_acc then echo c32-convex-bsd else echo c2-convex-bsd fi - exit ;; + exit ;; C34*:ConvexOS:*:* | convex:ConvexOS:C34*:*) echo c34-convex-bsd - exit ;; + exit ;; C38*:ConvexOS:*:* | convex:ConvexOS:C38*:*) echo c38-convex-bsd - exit ;; + exit ;; C4*:ConvexOS:*:* | convex:ConvexOS:C4*:*) echo c4-convex-bsd - exit ;; + exit ;; CRAY*Y-MP:*:*:*) echo ymp-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' exit ;; @@ -749,14 +796,14 @@ exit ;; F30[01]:UNIX_System_V:*:* | F700:UNIX_System_V:*:*) FUJITSU_PROC=`uname -m | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'` - FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'` - FUJITSU_REL=`echo ${UNAME_RELEASE} | sed -e 's/ /_/'` - echo "${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" - exit ;; + FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'` + FUJITSU_REL=`echo ${UNAME_RELEASE} | sed -e 's/ /_/'` + echo "${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" + exit ;; 5000:UNIX_System_V:4.*:*) - FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'` - FUJITSU_REL=`echo ${UNAME_RELEASE} | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/ /_/'` - echo "sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" + FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'` + FUJITSU_REL=`echo ${UNAME_RELEASE} | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/ /_/'` + echo 
"sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" exit ;; i*86:BSD/386:*:* | i*86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*) echo ${UNAME_MACHINE}-pc-bsdi${UNAME_RELEASE} @@ -768,37 +815,51 @@ echo ${UNAME_MACHINE}-unknown-bsdi${UNAME_RELEASE} exit ;; *:FreeBSD:*:*) - case ${UNAME_MACHINE} in - pc98) - echo i386-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;; + UNAME_PROCESSOR=`/usr/bin/uname -p` + case ${UNAME_PROCESSOR} in amd64) echo x86_64-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;; *) - echo ${UNAME_MACHINE}-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;; + echo ${UNAME_PROCESSOR}-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;; esac exit ;; i*:CYGWIN*:*) echo ${UNAME_MACHINE}-pc-cygwin exit ;; - i*:MINGW*:*) + *:MINGW64*:*) + echo ${UNAME_MACHINE}-pc-mingw64 + exit ;; + *:MINGW*:*) echo ${UNAME_MACHINE}-pc-mingw32 exit ;; + i*:MSYS*:*) + echo ${UNAME_MACHINE}-pc-msys + exit ;; i*:windows32*:*) - # uname -m includes "-pc" on this system. - echo ${UNAME_MACHINE}-mingw32 + # uname -m includes "-pc" on this system. + echo ${UNAME_MACHINE}-mingw32 exit ;; i*:PW*:*) echo ${UNAME_MACHINE}-pc-pw32 exit ;; - x86:Interix*:[3456]*) - echo i586-pc-interix${UNAME_RELEASE} - exit ;; - EM64T:Interix*:[3456]*) - echo x86_64-unknown-interix${UNAME_RELEASE} - exit ;; + *:Interix*:*) + case ${UNAME_MACHINE} in + x86) + echo i586-pc-interix${UNAME_RELEASE} + exit ;; + authenticamd | genuineintel | EM64T) + echo x86_64-unknown-interix${UNAME_RELEASE} + exit ;; + IA64) + echo ia64-unknown-interix${UNAME_RELEASE} + exit ;; + esac ;; [345]86:Windows_95:* | [345]86:Windows_98:* | [345]86:Windows_NT:*) echo i${UNAME_MACHINE}-pc-mks exit ;; + 8664:Windows_NT:*) + echo x86_64-pc-mks + exit ;; i*:Windows_NT*:* | Pentium*:Windows_NT*:*) # How do we know it's Interix rather than the generic POSIX subsystem? # It also conflicts with pre-2.0 versions of AT&T UWIN. 
Should we @@ -819,200 +880,145 @@ exit ;; *:GNU:*:*) # the GNU system - echo `echo ${UNAME_MACHINE}|sed -e 's,[-/].*$,,'`-unknown-gnu`echo ${UNAME_RELEASE}|sed -e 's,/.*$,,'` + echo `echo ${UNAME_MACHINE}|sed -e 's,[-/].*$,,'`-unknown-${LIBC}`echo ${UNAME_RELEASE}|sed -e 's,/.*$,,'` exit ;; *:GNU/*:*:*) # other systems with GNU libc and userland - echo ${UNAME_MACHINE}-unknown-`echo ${UNAME_SYSTEM} | sed 's,^[^/]*/,,' | tr '[A-Z]' '[a-z]'``echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`-gnu + echo ${UNAME_MACHINE}-unknown-`echo ${UNAME_SYSTEM} | sed 's,^[^/]*/,,' | tr '[A-Z]' '[a-z]'``echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`-${LIBC} exit ;; i*86:Minix:*:*) echo ${UNAME_MACHINE}-pc-minix exit ;; + aarch64:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + exit ;; + aarch64_be:Linux:*:*) + UNAME_MACHINE=aarch64_be + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + exit ;; + alpha:Linux:*:*) + case `sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' < /proc/cpuinfo` in + EV5) UNAME_MACHINE=alphaev5 ;; + EV56) UNAME_MACHINE=alphaev56 ;; + PCA56) UNAME_MACHINE=alphapca56 ;; + PCA57) UNAME_MACHINE=alphapca56 ;; + EV6) UNAME_MACHINE=alphaev6 ;; + EV67) UNAME_MACHINE=alphaev67 ;; + EV68*) UNAME_MACHINE=alphaev68 ;; + esac + objdump --private-headers /bin/sh | grep -q ld.so.1 + if test "$?" 
= 0 ; then LIBC="gnulibc1" ; fi + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + exit ;; arm*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + eval $set_cc_for_build + if echo __ARM_EABI__ | $CC_FOR_BUILD -E - 2>/dev/null \ + | grep -q __ARM_EABI__ + then + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + else + if echo __ARM_PCS_VFP | $CC_FOR_BUILD -E - 2>/dev/null \ + | grep -q __ARM_PCS_VFP + then + echo ${UNAME_MACHINE}-unknown-linux-${LIBC}eabi + else + echo ${UNAME_MACHINE}-unknown-linux-${LIBC}eabihf + fi + fi exit ;; avr32*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; cris:Linux:*:*) - echo cris-axis-linux-gnu + echo ${UNAME_MACHINE}-axis-linux-${LIBC} exit ;; crisv32:Linux:*:*) - echo crisv32-axis-linux-gnu + echo ${UNAME_MACHINE}-axis-linux-${LIBC} exit ;; frv:Linux:*:*) - echo frv-unknown-linux-gnu + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + exit ;; + hexagon:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + exit ;; + i*86:Linux:*:*) + echo ${UNAME_MACHINE}-pc-linux-${LIBC} exit ;; ia64:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; m32r*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; m68*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; - mips:Linux:*:*) + mips:Linux:*:* | mips64:Linux:*:*) eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #undef CPU - #undef mips - #undef mipsel + #undef ${UNAME_MACHINE} + #undef ${UNAME_MACHINE}el #if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL) - CPU=mipsel + CPU=${UNAME_MACHINE}el #else #if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB) - CPU=mips + CPU=${UNAME_MACHINE} #else CPU= #endif #endif EOF - eval "`$CC_FOR_BUILD -E $dummy.c 2>/dev/null | sed -n ' - /^CPU/{ - s: ::g - p - }'`" - test 
x"${CPU}" != x && { echo "${CPU}-unknown-linux-gnu"; exit; } - ;; - mips64:Linux:*:*) - eval $set_cc_for_build - sed 's/^ //' << EOF >$dummy.c - #undef CPU - #undef mips64 - #undef mips64el - #if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL) - CPU=mips64el - #else - #if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB) - CPU=mips64 - #else - CPU= - #endif - #endif -EOF - eval "`$CC_FOR_BUILD -E $dummy.c 2>/dev/null | sed -n ' - /^CPU/{ - s: ::g - p - }'`" - test x"${CPU}" != x && { echo "${CPU}-unknown-linux-gnu"; exit; } + eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^CPU'` + test x"${CPU}" != x && { echo "${CPU}-unknown-linux-${LIBC}"; exit; } ;; or32:Linux:*:*) - echo or32-unknown-linux-gnu + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; - ppc:Linux:*:*) - echo powerpc-unknown-linux-gnu - exit ;; - ppc64:Linux:*:*) - echo powerpc64-unknown-linux-gnu + padre:Linux:*:*) + echo sparc-unknown-linux-${LIBC} exit ;; - alpha:Linux:*:*) - case `sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' < /proc/cpuinfo` in - EV5) UNAME_MACHINE=alphaev5 ;; - EV56) UNAME_MACHINE=alphaev56 ;; - PCA56) UNAME_MACHINE=alphapca56 ;; - PCA57) UNAME_MACHINE=alphapca56 ;; - EV6) UNAME_MACHINE=alphaev6 ;; - EV67) UNAME_MACHINE=alphaev67 ;; - EV68*) UNAME_MACHINE=alphaev68 ;; - esac - objdump --private-headers /bin/sh | grep ld.so.1 >/dev/null - if test "$?" 
= 0 ; then LIBC="libc1" ; else LIBC="" ; fi - echo ${UNAME_MACHINE}-unknown-linux-gnu${LIBC} + parisc64:Linux:*:* | hppa64:Linux:*:*) + echo hppa64-unknown-linux-${LIBC} exit ;; parisc:Linux:*:* | hppa:Linux:*:*) # Look for CPU level case `grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2` in - PA7*) echo hppa1.1-unknown-linux-gnu ;; - PA8*) echo hppa2.0-unknown-linux-gnu ;; - *) echo hppa-unknown-linux-gnu ;; + PA7*) echo hppa1.1-unknown-linux-${LIBC} ;; + PA8*) echo hppa2.0-unknown-linux-${LIBC} ;; + *) echo hppa-unknown-linux-${LIBC} ;; esac exit ;; - parisc64:Linux:*:* | hppa64:Linux:*:*) - echo hppa64-unknown-linux-gnu + ppc64:Linux:*:*) + echo powerpc64-unknown-linux-${LIBC} + exit ;; + ppc:Linux:*:*) + echo powerpc-unknown-linux-${LIBC} exit ;; s390:Linux:*:* | s390x:Linux:*:*) echo ${UNAME_MACHINE}-ibm-linux exit ;; sh64*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; sh*:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; sparc:Linux:*:* | sparc64:Linux:*:*) - echo ${UNAME_MACHINE}-unknown-linux-gnu + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + exit ;; + tile*:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; vax:Linux:*:*) - echo ${UNAME_MACHINE}-dec-linux-gnu + echo ${UNAME_MACHINE}-dec-linux-${LIBC} exit ;; x86_64:Linux:*:*) - echo x86_64-unknown-linux-gnu + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} + exit ;; + xtensa*:Linux:*:*) + echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; - i*86:Linux:*:*) - # The BFD linker knows what the default object file format is, so - # first see if it will tell us. cd to the root directory to prevent - # problems with other programs or directories called `ld' in the path. - # Set LC_ALL=C to ensure ld outputs messages in English. 
- ld_supported_targets=`cd /; LC_ALL=C ld --help 2>&1 \ - | sed -ne '/supported targets:/!d - s/[ ][ ]*/ /g - s/.*supported targets: *// - s/ .*// - p'` - case "$ld_supported_targets" in - elf32-i386) - TENTATIVE="${UNAME_MACHINE}-pc-linux-gnu" - ;; - a.out-i386-linux) - echo "${UNAME_MACHINE}-pc-linux-gnuaout" - exit ;; - coff-i386) - echo "${UNAME_MACHINE}-pc-linux-gnucoff" - exit ;; - "") - # Either a pre-BFD a.out linker (linux-gnuoldld) or - # one that does not give us useful --help. - echo "${UNAME_MACHINE}-pc-linux-gnuoldld" - exit ;; - esac - # Determine whether the default compiler is a.out or elf - eval $set_cc_for_build - sed 's/^ //' << EOF >$dummy.c - #include - #ifdef __ELF__ - # ifdef __GLIBC__ - # if __GLIBC__ >= 2 - LIBC=gnu - # else - LIBC=gnulibc1 - # endif - # else - LIBC=gnulibc1 - # endif - #else - #if defined(__INTEL_COMPILER) || defined(__PGI) || defined(__SUNPRO_C) || defined(__SUNPRO_CC) - LIBC=gnu - #else - LIBC=gnuaout - #endif - #endif - #ifdef __dietlibc__ - LIBC=dietlibc - #endif -EOF - eval "`$CC_FOR_BUILD -E $dummy.c 2>/dev/null | sed -n ' - /^LIBC/{ - s: ::g - p - }'`" - test x"${LIBC}" != x && { - echo "${UNAME_MACHINE}-pc-linux-${LIBC}" - exit - } - test x"${TENTATIVE}" != x && { echo "${TENTATIVE}"; exit; } - ;; i*86:DYNIX/ptx:4*:*) # ptx 4.0 does uname -s correctly, with DYNIX/ptx in there. # earlier versions are messed up and put the nodename in both @@ -1020,11 +1026,11 @@ echo i386-sequent-sysv4 exit ;; i*86:UNIX_SV:4.2MP:2.*) - # Unixware is an offshoot of SVR4, but it has its own version - # number series starting with 2... - # I am not positive that other SVR4 systems won't match this, + # Unixware is an offshoot of SVR4, but it has its own version + # number series starting with 2... + # I am not positive that other SVR4 systems won't match this, # I just have to hope. -- rms. - # Use sysv4.2uw... so that sysv4* matches it. + # Use sysv4.2uw... so that sysv4* matches it. 
echo ${UNAME_MACHINE}-pc-sysv4.2uw${UNAME_VERSION} exit ;; i*86:OS/2:*:*) @@ -1041,7 +1047,7 @@ i*86:syllable:*:*) echo ${UNAME_MACHINE}-pc-syllable exit ;; - i*86:LynxOS:2.*:* | i*86:LynxOS:3.[01]*:* | i*86:LynxOS:4.0*:*) + i*86:LynxOS:2.*:* | i*86:LynxOS:3.[01]*:* | i*86:LynxOS:4.[02]*:*) echo i386-unknown-lynxos${UNAME_RELEASE} exit ;; i*86:*DOS:*:*) @@ -1056,7 +1062,7 @@ fi exit ;; i*86:*:5:[678]*) - # UnixWare 7.x, OpenUNIX and OpenServer 6. + # UnixWare 7.x, OpenUNIX and OpenServer 6. case `/bin/uname -X | grep "^Machine"` in *486*) UNAME_MACHINE=i486 ;; *Pentium) UNAME_MACHINE=i586 ;; @@ -1084,10 +1090,13 @@ exit ;; pc:*:*:*) # Left here for compatibility: - # uname -m prints for DJGPP always 'pc', but it prints nothing about - # the processor, so we play safe by assuming i386. - echo i386-pc-msdosdjgpp - exit ;; + # uname -m prints for DJGPP always 'pc', but it prints nothing about + # the processor, so we play safe by assuming i586. + # Note: whatever this is, it MUST be the same as what config.sub + # prints for the "djgpp" host, or else GDB configury will decide that + # this is a cross-build. 
+ echo i586-pc-msdosdjgpp + exit ;; Intel:Mach:3*:*) echo i386-pc-mach3 exit ;; @@ -1122,8 +1131,18 @@ /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \ && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;; 3[34]??:*:4.0:* | 3[34]??,*:*:4.0:*) - /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ - && { echo i486-ncr-sysv4; exit; } ;; + /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ + && { echo i486-ncr-sysv4; exit; } ;; + NCR*:*:4.2:* | MPRAS*:*:4.2:*) + OS_REL='.3' + test -r /etc/.relid \ + && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid` + /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ + && { echo i486-ncr-sysv4.3${OS_REL}; exit; } + /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \ + && { echo i586-ncr-sysv4.3${OS_REL}; exit; } + /bin/uname -p 2>/dev/null | /bin/grep pteron >/dev/null \ + && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;; m68*:LynxOS:2.*:* | m68*:LynxOS:3.0*:*) echo m68k-unknown-lynxos${UNAME_RELEASE} exit ;; @@ -1136,7 +1155,7 @@ rs6000:LynxOS:2.*:*) echo rs6000-unknown-lynxos${UNAME_RELEASE} exit ;; - PowerPC:LynxOS:2.*:* | PowerPC:LynxOS:3.[01]*:* | PowerPC:LynxOS:4.0*:*) + PowerPC:LynxOS:2.*:* | PowerPC:LynxOS:3.[01]*:* | PowerPC:LynxOS:4.[02]*:*) echo powerpc-unknown-lynxos${UNAME_RELEASE} exit ;; SM[BE]S:UNIX_SV:*:*) @@ -1156,10 +1175,10 @@ echo ns32k-sni-sysv fi exit ;; - PENTIUM:*:4.0*:*) # Unisys `ClearPath HMP IX 4000' SVR4/MP effort - # says - echo i586-unisys-sysv4 - exit ;; + PENTIUM:*:4.0*:*) # Unisys `ClearPath HMP IX 4000' SVR4/MP effort + # says + echo i586-unisys-sysv4 + exit ;; *:UNIX_System_V:4*:FTX*) # From Gerald Hewes . # How about differentiating between stratus architectures? 
-djm @@ -1185,11 +1204,11 @@ exit ;; R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*) if [ -d /usr/nec ]; then - echo mips-nec-sysv${UNAME_RELEASE} + echo mips-nec-sysv${UNAME_RELEASE} else - echo mips-unknown-sysv${UNAME_RELEASE} + echo mips-unknown-sysv${UNAME_RELEASE} fi - exit ;; + exit ;; BeBox:BeOS:*:*) # BeOS running on hardware made by Be, PPC only. echo powerpc-be-beos exit ;; @@ -1199,6 +1218,12 @@ BePC:BeOS:*:*) # BeOS running on Intel PC compatible. echo i586-pc-beos exit ;; + BePC:Haiku:*:*) # Haiku running on Intel PC compatible. + echo i586-pc-haiku + exit ;; + x86_64:Haiku:*:*) + echo x86_64-unknown-haiku + exit ;; SX-4:SUPER-UX:*:*) echo sx4-nec-superux${UNAME_RELEASE} exit ;; @@ -1208,6 +1233,15 @@ SX-6:SUPER-UX:*:*) echo sx6-nec-superux${UNAME_RELEASE} exit ;; + SX-7:SUPER-UX:*:*) + echo sx7-nec-superux${UNAME_RELEASE} + exit ;; + SX-8:SUPER-UX:*:*) + echo sx8-nec-superux${UNAME_RELEASE} + exit ;; + SX-8R:SUPER-UX:*:*) + echo sx8r-nec-superux${UNAME_RELEASE} + exit ;; Power*:Rhapsody:*:*) echo powerpc-apple-rhapsody${UNAME_RELEASE} exit ;; @@ -1217,6 +1251,16 @@ *:Darwin:*:*) UNAME_PROCESSOR=`uname -p` || UNAME_PROCESSOR=unknown case $UNAME_PROCESSOR in + i386) + eval $set_cc_for_build + if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then + if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \ + (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \ + grep IS_64BIT_ARCH >/dev/null + then + UNAME_PROCESSOR="x86_64" + fi + fi ;; unknown) UNAME_PROCESSOR=powerpc ;; esac echo ${UNAME_PROCESSOR}-apple-darwin${UNAME_RELEASE} @@ -1232,7 +1276,10 @@ *:QNX:*:4*) echo i386-pc-qnx exit ;; - NSE-?:NONSTOP_KERNEL:*:*) + NEO-?:NONSTOP_KERNEL:*:*) + echo neo-tandem-nsk${UNAME_RELEASE} + exit ;; + NSE-*:NONSTOP_KERNEL:*:*) echo nse-tandem-nsk${UNAME_RELEASE} exit ;; NSR-?:NONSTOP_KERNEL:*:*) @@ -1277,13 +1324,13 @@ echo pdp10-unknown-its exit ;; SEI:*:*:SEIUX) - echo mips-sei-seiux${UNAME_RELEASE} + echo mips-sei-seiux${UNAME_RELEASE} exit 
;; *:DragonFly:*:*) echo ${UNAME_MACHINE}-unknown-dragonfly`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` exit ;; *:*VMS:*:*) - UNAME_MACHINE=`(uname -p) 2>/dev/null` + UNAME_MACHINE=`(uname -p) 2>/dev/null` case "${UNAME_MACHINE}" in A*) echo alpha-dec-vms ; exit ;; I*) echo ia64-dec-vms ; exit ;; @@ -1298,11 +1345,14 @@ i*86:rdos:*:*) echo ${UNAME_MACHINE}-pc-rdos exit ;; + i*86:AROS:*:*) + echo ${UNAME_MACHINE}-pc-aros + exit ;; + x86_64:VMkernel:*:*) + echo ${UNAME_MACHINE}-unknown-esx + exit ;; esac -#echo '(No uname command or uname output not recognized.)' 1>&2 -#echo "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" 1>&2 - eval $set_cc_for_build cat >$dummy.c < printf ("m68k-sony-newsos%s\n", #ifdef NEWSOS4 - "4" + "4" #else - "" + "" #endif - ); exit (0); + ); exit (0); #endif #endif @@ -1458,9 +1508,9 @@ the operating system you are using. It is advised that you download the most up to date version of the config scripts from - http://savannah.gnu.org/cgi-bin/viewcvs/*checkout*/config/config/config.guess + http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD and - http://savannah.gnu.org/cgi-bin/viewcvs/*checkout*/config/config/config.sub + http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD If the version you run ($0) is already up to date, please send the following data and any information you think might be diff -Nru couchdb-1.2.0/build-aux/config.sub couchdb-1.4.0~rc.1/build-aux/config.sub --- couchdb-1.2.0/build-aux/config.sub 2011-11-03 17:56:27.000000000 -0400 +++ couchdb-1.4.0~rc.1/build-aux/config.sub 2013-05-20 03:33:19.000000000 -0400 @@ -1,44 +1,42 @@ #! /bin/sh # Configuration validation subroutine script. # Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, -# 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, -# Inc. +# 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +# 2011, 2012, 2013 Free Software Foundation, Inc. 
-timestamp='2006-09-20' +timestamp='2013-01-11' -# This file is (in principle) common to ALL GNU software. -# The presence of a machine in this file suggests that SOME GNU software -# can handle that machine. It does not imply ALL GNU software can. -# -# This file is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or +# This file is free software; you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. # -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. # # You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street - Fifth Floor, Boston, MA -# 02110-1301, USA. +# along with this program; if not, see . # # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under -# the same distribution terms that you use for the rest of that program. +# the same distribution terms that you use for the rest of that +# program. This Exception is an additional permission under section 7 +# of the GNU General Public License, version 3 ("GPLv3"). -# Please send patches to . 
Submit a context -# diff and a properly formatted ChangeLog entry. +# Please send patches with a ChangeLog entry to config-patches@gnu.org. # # Configuration subroutine to validate and canonicalize a configuration type. # Supply the specified configuration type as an argument. # If it is invalid, we print an error message on stderr and exit with code 1. # Otherwise, we print the canonical config type on stdout and succeed. +# You can get the latest version of this script from: +# http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD + # This file is supposed to be the same for all GNU packages # and recognize all the CPU types, system types and aliases # that are meaningful with *any* GNU software. @@ -72,8 +70,9 @@ version="\ GNU config.sub ($timestamp) -Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005 -Free Software Foundation, Inc. +Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, +2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, +2012, 2013 Free Software Foundation, Inc. This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." @@ -120,12 +119,18 @@ # Here we must recognize all the valid KERNEL-OS combinations. 
maybe_os=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\2/'` case $maybe_os in - nto-qnx* | linux-gnu* | linux-dietlibc | linux-newlib* | linux-uclibc* | \ - uclinux-uclibc* | uclinux-gnu* | kfreebsd*-gnu* | knetbsd*-gnu* | netbsd*-gnu* | \ + nto-qnx* | linux-gnu* | linux-android* | linux-dietlibc | linux-newlib* | \ + linux-musl* | linux-uclibc* | uclinux-uclibc* | uclinux-gnu* | kfreebsd*-gnu* | \ + knetbsd*-gnu* | netbsd*-gnu* | \ + kopensolaris*-gnu* | \ storm-chaos* | os2-emx* | rtmk-nova*) os=-$maybe_os basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'` ;; + android-linux) + os=-linux-android + basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'`-unknown + ;; *) basic_machine=`echo $1 | sed 's/-[^-]*$//'` if [ $basic_machine != $1 ] @@ -148,10 +153,13 @@ -convergent* | -ncr* | -news | -32* | -3600* | -3100* | -hitachi* |\ -c[123]* | -convex* | -sun | -crds | -omron* | -dg | -ultra | -tti* | \ -harris | -dolphin | -highlevel | -gould | -cbm | -ns | -masscomp | \ - -apple | -axis | -knuth | -cray) + -apple | -axis | -knuth | -cray | -microblaze*) os= basic_machine=$1 ;; + -bluegene*) + os=-cnk + ;; -sim | -cisco | -oki | -wec | -winbond) os= basic_machine=$1 @@ -166,10 +174,10 @@ os=-chorusos basic_machine=$1 ;; - -chorusrdb) - os=-chorusrdb + -chorusrdb) + os=-chorusrdb basic_machine=$1 - ;; + ;; -hiux*) os=-hiuxwe2 ;; @@ -214,6 +222,12 @@ -isc*) basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; + -lynx*178) + os=-lynxos178 + ;; + -lynx*5) + os=-lynxos5 + ;; -lynx*) os=-lynxos ;; @@ -238,24 +252,34 @@ # Some are omitted here because they have special meanings below. 
1750a | 580 \ | a29k \ + | aarch64 | aarch64_be \ | alpha | alphaev[4-8] | alphaev56 | alphaev6[78] | alphapca5[67] \ | alpha64 | alpha64ev[4-8] | alpha64ev56 | alpha64ev6[78] | alpha64pca5[67] \ | am33_2.0 \ - | arc | arm | arm[bl]e | arme[lb] | armv[2345] | armv[345][lb] | avr | avr32 \ + | arc \ + | arm | arm[bl]e | arme[lb] | armv[2-8] | armv[3-8][lb] | armv7[arm] \ + | avr | avr32 \ + | be32 | be64 \ | bfin \ | c4x | clipper \ - | d10v | d30v | dlx | dsp16xx \ - | fr30 | frv \ + | d10v | d30v | dlx | dsp16xx | dvp \ + | epiphany \ + | fido | fr30 | frv \ | h8300 | h8500 | hppa | hppa1.[01] | hppa2.0 | hppa2.0[nw] | hppa64 \ + | hexagon \ | i370 | i860 | i960 | ia64 \ | ip2k | iq2000 \ + | le32 | le64 \ + | lm32 \ | m32c | m32r | m32rle | m68000 | m68k | m88k \ - | maxq | mb | microblaze | mcore \ + | maxq | mb | microblaze | microblazeel | mcore | mep | metag \ | mips | mipsbe | mipseb | mipsel | mipsle \ | mips16 \ | mips64 | mips64el \ - | mips64vr | mips64vrel \ + | mips64octeon | mips64octeonel \ | mips64orion | mips64orionel \ + | mips64r5900 | mips64r5900el \ + | mips64vr | mips64vrel \ | mips64vr4100 | mips64vr4100el \ | mips64vr4300 | mips64vr4300el \ | mips64vr5000 | mips64vr5000el \ @@ -266,31 +290,45 @@ | mipsisa64r2 | mipsisa64r2el \ | mipsisa64sb1 | mipsisa64sb1el \ | mipsisa64sr71k | mipsisa64sr71kel \ + | mipsr5900 | mipsr5900el \ | mipstx39 | mipstx39el \ | mn10200 | mn10300 \ + | moxie \ | mt \ | msp430 \ + | nds32 | nds32le | nds32be \ | nios | nios2 \ | ns16k | ns32k \ + | open8 \ | or32 \ | pdp10 | pdp11 | pj | pjl \ - | powerpc | powerpc64 | powerpc64le | powerpcle | ppcbe \ + | powerpc | powerpc64 | powerpc64le | powerpcle \ | pyramid \ + | rl78 | rx \ | score \ - | sh | sh[1234] | sh[24]a | sh[23]e | sh[34]eb | sheb | shbe | shle | sh[1234]le | sh3ele \ + | sh | sh[1234] | sh[24]a | sh[24]aeb | sh[23]e | sh[34]eb | sheb | shbe | shle | sh[1234]le | sh3ele \ | sh64 | sh64le \ | sparc | sparc64 | sparc64b | sparc64v | sparc86x | sparclet | 
sparclite \ | sparcv8 | sparcv9 | sparcv9b | sparcv9v \ - | spu | strongarm \ - | tahoe | thumb | tic4x | tic80 | tron \ - | v850 | v850e \ + | spu \ + | tahoe | tic4x | tic54x | tic55x | tic6x | tic80 | tron \ + | ubicom32 \ + | v850 | v850e | v850e1 | v850e2 | v850es | v850e2v3 \ | we32k \ - | x86 | xc16x | xscale | xscalee[bl] | xstormy16 | xtensa \ - | z8k) + | x86 | xc16x | xstormy16 | xtensa \ + | z8k | z80) basic_machine=$basic_machine-unknown ;; - m6811 | m68hc11 | m6812 | m68hc12) - # Motorola 68HC11/12. + c54x) + basic_machine=tic54x-unknown + ;; + c55x) + basic_machine=tic55x-unknown + ;; + c6x) + basic_machine=tic6x-unknown + ;; + m6811 | m68hc11 | m6812 | m68hc12 | m68hcs12x | picochip) basic_machine=$basic_machine-unknown os=-none ;; @@ -300,6 +338,21 @@ basic_machine=mt-unknown ;; + strongarm | thumb | xscale) + basic_machine=arm-unknown + ;; + xgate) + basic_machine=$basic_machine-unknown + os=-none + ;; + xscaleeb) + basic_machine=armeb-unknown + ;; + + xscaleel) + basic_machine=armel-unknown + ;; + # We use `pc' rather than `unknown' # because (1) that's what they normally are, and # (2) the word "unknown" tends to confuse beginning users. @@ -314,29 +367,37 @@ # Recognize the basic CPU types with company name. 
580-* \ | a29k-* \ + | aarch64-* | aarch64_be-* \ | alpha-* | alphaev[4-8]-* | alphaev56-* | alphaev6[78]-* \ | alpha64-* | alpha64ev[4-8]-* | alpha64ev56-* | alpha64ev6[78]-* \ | alphapca5[67]-* | alpha64pca5[67]-* | arc-* \ | arm-* | armbe-* | armle-* | armeb-* | armv*-* \ | avr-* | avr32-* \ + | be32-* | be64-* \ | bfin-* | bs2000-* \ - | c[123]* | c30-* | [cjt]90-* | c4x-* | c54x-* | c55x-* | c6x-* \ + | c[123]* | c30-* | [cjt]90-* | c4x-* \ | clipper-* | craynv-* | cydra-* \ | d10v-* | d30v-* | dlx-* \ | elxsi-* \ - | f30[01]-* | f700-* | fr30-* | frv-* | fx80-* \ + | f30[01]-* | f700-* | fido-* | fr30-* | frv-* | fx80-* \ | h8300-* | h8500-* \ | hppa-* | hppa1.[01]-* | hppa2.0-* | hppa2.0[nw]-* | hppa64-* \ + | hexagon-* \ | i*86-* | i860-* | i960-* | ia64-* \ | ip2k-* | iq2000-* \ + | le32-* | le64-* \ + | lm32-* \ | m32c-* | m32r-* | m32rle-* \ | m68000-* | m680[012346]0-* | m68360-* | m683?2-* | m68k-* \ - | m88110-* | m88k-* | maxq-* | mcore-* \ + | m88110-* | m88k-* | maxq-* | mcore-* | metag-* \ + | microblaze-* | microblazeel-* \ | mips-* | mipsbe-* | mipseb-* | mipsel-* | mipsle-* \ | mips16-* \ | mips64-* | mips64el-* \ - | mips64vr-* | mips64vrel-* \ + | mips64octeon-* | mips64octeonel-* \ | mips64orion-* | mips64orionel-* \ + | mips64r5900-* | mips64r5900el-* \ + | mips64vr-* | mips64vrel-* \ | mips64vr4100-* | mips64vr4100el-* \ | mips64vr4300-* | mips64vr4300el-* \ | mips64vr5000-* | mips64vr5000el-* \ @@ -347,31 +408,41 @@ | mipsisa64r2-* | mipsisa64r2el-* \ | mipsisa64sb1-* | mipsisa64sb1el-* \ | mipsisa64sr71k-* | mipsisa64sr71kel-* \ + | mipsr5900-* | mipsr5900el-* \ | mipstx39-* | mipstx39el-* \ | mmix-* \ | mt-* \ | msp430-* \ + | nds32-* | nds32le-* | nds32be-* \ | nios-* | nios2-* \ | none-* | np1-* | ns16k-* | ns32k-* \ + | open8-* \ | orion-* \ | pdp10-* | pdp11-* | pj-* | pjl-* | pn-* | power-* \ - | powerpc-* | powerpc64-* | powerpc64le-* | powerpcle-* | ppcbe-* \ + | powerpc-* | powerpc64-* | powerpc64le-* | powerpcle-* \ | pyramid-* 
\ - | romp-* | rs6000-* \ - | sh-* | sh[1234]-* | sh[24]a-* | sh[23]e-* | sh[34]eb-* | sheb-* | shbe-* \ + | rl78-* | romp-* | rs6000-* | rx-* \ + | sh-* | sh[1234]-* | sh[24]a-* | sh[24]aeb-* | sh[23]e-* | sh[34]eb-* | sheb-* | shbe-* \ | shle-* | sh[1234]le-* | sh3ele-* | sh64-* | sh64le-* \ | sparc-* | sparc64-* | sparc64b-* | sparc64v-* | sparc86x-* | sparclet-* \ | sparclite-* \ - | sparcv8-* | sparcv9-* | sparcv9b-* | sparcv9v-* | strongarm-* | sv1-* | sx?-* \ - | tahoe-* | thumb-* \ + | sparcv8-* | sparcv9-* | sparcv9b-* | sparcv9v-* | sv1-* | sx?-* \ + | tahoe-* \ | tic30-* | tic4x-* | tic54x-* | tic55x-* | tic6x-* | tic80-* \ + | tile*-* \ | tron-* \ - | v850-* | v850e-* | vax-* \ + | ubicom32-* \ + | v850-* | v850e-* | v850e1-* | v850es-* | v850e2-* | v850e2v3-* \ + | vax-* \ | we32k-* \ - | x86-* | x86_64-* | xc16x-* | xps100-* | xscale-* | xscalee[bl]-* \ - | xstormy16-* | xtensa-* \ + | x86-* | x86_64-* | xc16x-* | xps100-* \ + | xstormy16-* | xtensa*-* \ | ymp-* \ - | z8k-*) + | z8k-* | z80-*) + ;; + # Recognize the basic CPU types without company name, with glob match. + xtensa*) + basic_machine=$basic_machine-unknown ;; # Recognize the various machine names and aliases which stand # for a CPU type and a company and sometimes even an OS. 
@@ -389,7 +460,7 @@ basic_machine=a29k-amd os=-udi ;; - abacus) + abacus) basic_machine=abacus-unknown ;; adobe68k) @@ -435,6 +506,10 @@ basic_machine=m68k-apollo os=-bsd ;; + aros) + basic_machine=i386-pc + os=-aros + ;; aux) basic_machine=m68k-apple os=-aux @@ -443,10 +518,35 @@ basic_machine=ns32k-sequent os=-dynix ;; + blackfin) + basic_machine=bfin-unknown + os=-linux + ;; + blackfin-*) + basic_machine=bfin-`echo $basic_machine | sed 's/^[^-]*-//'` + os=-linux + ;; + bluegene*) + basic_machine=powerpc-ibm + os=-cnk + ;; + c54x-*) + basic_machine=tic54x-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + c55x-*) + basic_machine=tic55x-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + c6x-*) + basic_machine=tic6x-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; c90) basic_machine=c90-cray os=-unicos ;; + cegcc) + basic_machine=arm-unknown + os=-cegcc + ;; convex-c1) basic_machine=c1-convex os=-bsd @@ -475,8 +575,8 @@ basic_machine=craynv-cray os=-unicosmp ;; - cr16c) - basic_machine=cr16c-unknown + cr16 | cr16-*) + basic_machine=cr16-unknown os=-elf ;; crds | unos) @@ -514,6 +614,10 @@ basic_machine=m88k-motorola os=-sysv3 ;; + dicos) + basic_machine=i686-pc + os=-dicos + ;; djgpp) basic_machine=i586-pc os=-msdosdjgpp @@ -629,7 +733,6 @@ i370-ibm* | ibm*) basic_machine=i370-ibm ;; -# I'm not sure what "Sysv32" means. Should this be sysv3.2? 
i*86v32) basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` os=-sysv32 @@ -668,6 +771,14 @@ basic_machine=m68k-isi os=-sysv ;; + m68knommu) + basic_machine=m68k-unknown + os=-linux + ;; + m68knommu-*) + basic_machine=m68k-`echo $basic_machine | sed 's/^[^-]*-//'` + os=-linux + ;; m88k-omron*) basic_machine=m88k-omron ;; @@ -679,10 +790,21 @@ basic_machine=ns32k-utek os=-sysv ;; + microblaze*) + basic_machine=microblaze-xilinx + ;; + mingw64) + basic_machine=x86_64-pc + os=-mingw64 + ;; mingw32) basic_machine=i386-pc os=-mingw32 ;; + mingw32ce) + basic_machine=arm-unknown + os=-mingw32ce + ;; miniframe) basic_machine=m68000-convergent ;; @@ -690,6 +812,24 @@ basic_machine=m68k-atari os=-mint ;; + mipsEE* | ee | ps2) + basic_machine=mips64r5900el-scei + case $os in + -linux*) + ;; + *) + os=-elf + ;; + esac + ;; + iop) + basic_machine=mipsel-scei + os=-irx + ;; + dvp) + basic_machine=dvp-scei + os=-elf + ;; mips3*-*) basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'` ;; @@ -711,10 +851,18 @@ ms1-*) basic_machine=`echo $basic_machine | sed -e 's/ms1-/mt-/'` ;; + msys) + basic_machine=i386-pc + os=-msys + ;; mvs) basic_machine=i370-ibm os=-mvs ;; + nacl) + basic_machine=le32-unknown + os=-nacl + ;; ncr3000) basic_machine=i486-ncr os=-sysv4 @@ -779,6 +927,12 @@ np1) basic_machine=np1-gould ;; + neo-tandem) + basic_machine=neo-tandem + ;; + nse-tandem) + basic_machine=nse-tandem + ;; nsr-tandem) basic_machine=nsr-tandem ;; @@ -809,6 +963,14 @@ basic_machine=i860-intel os=-osf ;; + parisc) + basic_machine=hppa-unknown + os=-linux + ;; + parisc-*) + basic_machine=hppa-`echo $basic_machine | sed 's/^[^-]*-//'` + os=-linux + ;; pbd) basic_machine=sparc-tti ;; @@ -853,9 +1015,10 @@ ;; power) basic_machine=power-ibm ;; - ppc) basic_machine=powerpc-unknown + ppc | ppcbe) basic_machine=powerpc-unknown ;; - ppc-*) basic_machine=powerpc-`echo $basic_machine | sed 's/^[^-]*-//'` + ppc-* | ppcbe-*) + basic_machine=powerpc-`echo $basic_machine | sed 's/^[^-]*-//'` ;; ppcle 
| powerpclittle | ppc-le | powerpc-little) basic_machine=powerpcle-unknown @@ -880,7 +1043,11 @@ basic_machine=i586-unknown os=-pw32 ;; - rdos) + rdos | rdos64) + basic_machine=x86_64-pc + os=-rdos + ;; + rdos32) basic_machine=i386-pc os=-rdos ;; @@ -925,6 +1092,9 @@ basic_machine=sh-hitachi os=-hms ;; + sh5el) + basic_machine=sh5le-unknown + ;; sh64) basic_machine=sh64-unknown ;; @@ -946,6 +1116,9 @@ basic_machine=i860-stratus os=-sysv4 ;; + strongarm-* | thumb-*) + basic_machine=arm-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; sun2) basic_machine=m68000-sun ;; @@ -1002,17 +1175,9 @@ basic_machine=t90-cray os=-unicos ;; - tic54x | c54x*) - basic_machine=tic54x-unknown - os=-coff - ;; - tic55x | c55x*) - basic_machine=tic55x-unknown - os=-coff - ;; - tic6x | c6x*) - basic_machine=tic6x-unknown - os=-coff + tile*) + basic_machine=$basic_machine-unknown + os=-linux-gnu ;; tx39) basic_machine=mipstx39-unknown @@ -1081,6 +1246,9 @@ xps | xps100) basic_machine=xps100-honeywell ;; + xscale-* | xscalee[bl]-*) + basic_machine=`echo $basic_machine | sed 's/^xscale/arm/'` + ;; ymp) basic_machine=ymp-cray os=-unicos @@ -1089,6 +1257,10 @@ basic_machine=z8k-unknown os=-sim ;; + z80-*-coff) + basic_machine=z80-unknown + os=-sim + ;; none) basic_machine=none-none os=-none @@ -1127,7 +1299,7 @@ we32k) basic_machine=we32k-att ;; - sh[1234] | sh[24]a | sh[34]eb | sh[1234]le | sh[23]ele) + sh[1234] | sh[24]a | sh[24]aeb | sh[34]eb | sh[1234]le | sh[23]ele) basic_machine=sh-unknown ;; sparc | sparcv8 | sparcv9 | sparcv9b | sparcv9v) @@ -1174,9 +1346,12 @@ if [ x"$os" != x"" ] then case $os in - # First match some system type aliases - # that might get confused with valid system types. + # First match some system type aliases + # that might get confused with valid system types. # -solaris* is a basic system type, with this one exception. 
+ -auroraux) + os=-auroraux + ;; -solaris1 | -solaris1.*) os=`echo $os | sed -e 's|solaris1|sunos4|'` ;; @@ -1197,29 +1372,31 @@ # Each alternative MUST END IN A *, to match a version number. # -sysv* is not here because it comes later, after sysvr4. -gnu* | -bsd* | -mach* | -minix* | -genix* | -ultrix* | -irix* \ - | -*vms* | -sco* | -esix* | -isc* | -aix* | -sunos | -sunos[34]*\ - | -hpux* | -unos* | -osf* | -luna* | -dgux* | -solaris* | -sym* \ + | -*vms* | -sco* | -esix* | -isc* | -aix* | -cnk* | -sunos | -sunos[34]*\ + | -hpux* | -unos* | -osf* | -luna* | -dgux* | -auroraux* | -solaris* \ + | -sym* | -kopensolaris* | -plan9* \ | -amigaos* | -amigados* | -msdos* | -newsos* | -unicos* | -aof* \ - | -aos* \ + | -aos* | -aros* \ | -nindy* | -vxsim* | -vxworks* | -ebmon* | -hms* | -mvs* \ | -clix* | -riscos* | -uniplus* | -iris* | -rtu* | -xenix* \ | -hiux* | -386bsd* | -knetbsd* | -mirbsd* | -netbsd* \ - | -openbsd* | -solidbsd* \ + | -bitrig* | -openbsd* | -solidbsd* \ | -ekkobsd* | -kfreebsd* | -freebsd* | -riscix* | -lynxos* \ | -bosx* | -nextstep* | -cxux* | -aout* | -elf* | -oabi* \ | -ptx* | -coff* | -ecoff* | -winnt* | -domain* | -vsta* \ | -udi* | -eabi* | -lites* | -ieee* | -go32* | -aux* \ - | -chorusos* | -chorusrdb* \ - | -cygwin* | -pe* | -psos* | -moss* | -proelf* | -rtems* \ - | -mingw32* | -linux-gnu* | -linux-newlib* | -linux-uclibc* \ + | -chorusos* | -chorusrdb* | -cegcc* \ + | -cygwin* | -msys* | -pe* | -psos* | -moss* | -proelf* | -rtems* \ + | -mingw32* | -mingw64* | -linux-gnu* | -linux-android* \ + | -linux-newlib* | -linux-musl* | -linux-uclibc* \ | -uxpv* | -beos* | -mpeix* | -udk* \ | -interix* | -uwin* | -mks* | -rhapsody* | -darwin* | -opened* \ | -openstep* | -oskit* | -conix* | -pw32* | -nonstopux* \ - | -storm-chaos* | -tops10* | -tenex* | -tops20* | -its* \ + | -storm-chaos* | -tops10* | -tenex* | -tops20* | -its* | -irx* \ | -os2* | -vos* | -palmos* | -uclinux* | -nucleus* \ | -morphos* | -superux* | -rtmk* | -rtmk-nova* | 
-windiss* \ | -powermax* | -dnix* | -nx6 | -nx7 | -sei* | -dragonfly* \ - | -skyos* | -haiku* | -rdos* | -toppers*) + | -skyos* | -haiku* | -rdos* | -toppers* | -drops* | -es*) # Remember, each alternative MUST END IN *, to match a version number. ;; -qnx*) @@ -1258,7 +1435,7 @@ -opened*) os=-openedition ;; - -os400*) + -os400*) os=-os400 ;; -wince*) @@ -1307,7 +1484,7 @@ -sinix*) os=-sysv4 ;; - -tpf*) + -tpf*) os=-tpf ;; -triton*) @@ -1343,12 +1520,14 @@ -aros*) os=-aros ;; - -kaos*) - os=-kaos - ;; -zvmoe) os=-zvmoe ;; + -dicos*) + os=-dicos + ;; + -nacl*) + ;; -none) ;; *) @@ -1371,10 +1550,10 @@ # system, and we'll never get to this point. case $basic_machine in - score-*) + score-*) os=-elf ;; - spu-*) + spu-*) os=-elf ;; *-acorn) @@ -1386,8 +1565,20 @@ arm*-semi) os=-aout ;; - c4x-* | tic4x-*) - os=-coff + c4x-* | tic4x-*) + os=-coff + ;; + hexagon-*) + os=-elf + ;; + tic54x-*) + os=-coff + ;; + tic55x-*) + os=-coff + ;; + tic6x-*) + os=-coff ;; # This must come before the *-dec entry. pdp10-*) @@ -1407,13 +1598,13 @@ ;; m68000-sun) os=-sunos3 - # This also exists in the configure program, but was not the - # default. - # os=-sunos4 ;; m68*-cisco) os=-aout ;; + mep-*) + os=-elf + ;; mips*-cisco) os=-elf ;; @@ -1438,7 +1629,7 @@ *-ibm) os=-aix ;; - *-knuth) + *-knuth) os=-mmixware ;; *-wec) @@ -1543,7 +1734,7 @@ -sunos*) vendor=sun ;; - -aix*) + -cnk*|-aix*) vendor=ibm ;; -beos*) diff -Nru couchdb-1.2.0/build-aux/depcomp couchdb-1.4.0~rc.1/build-aux/depcomp --- couchdb-1.2.0/build-aux/depcomp 2011-11-03 17:56:27.000000000 -0400 +++ couchdb-1.4.0~rc.1/build-aux/depcomp 2012-09-03 11:10:15.000000000 -0400 @@ -1,10 +1,10 @@ #! /bin/sh # depcomp - compile a program generating dependencies as side-effects -scriptversion=2006-10-15.18 +scriptversion=2012-03-27.16; # UTC -# Copyright (C) 1999, 2000, 2003, 2004, 2005, 2006 Free Software -# Foundation, Inc. 
+# Copyright (C) 1999, 2000, 2003, 2004, 2005, 2006, 2007, 2009, 2010, +# 2011, 2012 Free Software Foundation, Inc. # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -17,9 +17,7 @@ # GNU General Public License for more details. # You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA -# 02110-1301, USA. +# along with this program. If not, see . # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a @@ -30,7 +28,7 @@ case $1 in '') - echo "$0: No command. Try \`$0 --help' for more information." 1>&2 + echo "$0: No command. Try '$0 --help' for more information." 1>&2 exit 1; ;; -h | --h*) @@ -42,11 +40,11 @@ Environment variables: depmode Dependency tracking mode. - source Source file read by `PROGRAMS ARGS'. - object Object file output by `PROGRAMS ARGS'. + source Source file read by 'PROGRAMS ARGS'. + object Object file output by 'PROGRAMS ARGS'. DEPDIR directory where to store dependencies. depfile Dependency file to output. - tmpdepfile Temporary file to use when outputing dependencies. + tmpdepfile Temporary file to use when outputting dependencies. libtool Whether libtool is used (yes/no). Report bugs to . @@ -59,6 +57,12 @@ ;; esac +# A tabulation character. +tab=' ' +# A newline character. +nl=' +' + if test -z "$depmode" || test -z "$source" || test -z "$object"; then echo "depcomp: Variables source, object and depmode must be set" 1>&2 exit 1 @@ -87,6 +91,29 @@ depmode=dashmstdout fi +cygpath_u="cygpath -u -f -" +if test "$depmode" = msvcmsys; then + # This is just like msvisualcpp but w/o cygpath translation. 
+ # Just convert the backslash-escaped backslashes to single forward + # slashes to satisfy depend.m4 + cygpath_u='sed s,\\\\,/,g' + depmode=msvisualcpp +fi + +if test "$depmode" = msvc7msys; then + # This is just like msvc7 but w/o cygpath translation. + # Just convert the backslash-escaped backslashes to single forward + # slashes to satisfy depend.m4 + cygpath_u='sed s,\\\\,/,g' + depmode=msvc7 +fi + +if test "$depmode" = xlc; then + # IBM C/C++ Compilers xlc/xlC can output gcc-like dependency informations. + gccflag=-qmakedep=gcc,-MF + depmode=gcc +fi + case "$depmode" in gcc3) ## gcc 3 implements dependency tracking that does exactly what @@ -141,20 +168,21 @@ ## The second -e expression handles DOS-style file names with drive letters. sed -e 's/^[^:]*: / /' \ -e 's/^['$alpha']:\/[^:]*: / /' < "$tmpdepfile" >> "$depfile" -## This next piece of magic avoids the `deleted header file' problem. +## This next piece of magic avoids the "deleted header file" problem. ## The problem is that when a header file which appears in a .P file ## is deleted, the dependency causes make to die (because there is ## typically no way to rebuild the header). We avoid this by adding ## dummy dependencies for each header file. Too bad gcc doesn't do ## this for us directly. - tr ' ' ' -' < "$tmpdepfile" | -## Some versions of gcc put a space before the `:'. On the theory + tr ' ' "$nl" < "$tmpdepfile" | +## Some versions of gcc put a space before the ':'. On the theory ## that the space means something, we add a space to the output as -## well. +## well. hp depmode also adds that space, but also prefixes the VPATH +## to the object. Take care to not repeat it in the output. ## Some versions of the HPUX 10.20 sed can't process this invocation ## correctly. Breaking it into two sed invocations is a workaround. 
- sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' | sed -e 's/$/ :/' >> "$depfile" + sed -e 's/^\\$//' -e '/^$/d' -e "s|.*$object$||" -e '/:$/d' \ + | sed -e 's/$/ :/' >> "$depfile" rm -f "$tmpdepfile" ;; @@ -186,20 +214,17 @@ # clever and replace this with sed code, as IRIX sed won't handle # lines with more than a fixed number of characters (4096 in # IRIX 6.2 sed, 8192 in IRIX 6.5). We also remove comment lines; - # the IRIX cc adds comments like `#:fec' to the end of the + # the IRIX cc adds comments like '#:fec' to the end of the # dependency line. - tr ' ' ' -' < "$tmpdepfile" \ + tr ' ' "$nl" < "$tmpdepfile" \ | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' | \ - tr ' -' ' ' >> $depfile - echo >> $depfile + tr "$nl" ' ' >> "$depfile" + echo >> "$depfile" # The second pass generates a dummy entry for each header file. - tr ' ' ' -' < "$tmpdepfile" \ + tr ' ' "$nl" < "$tmpdepfile" \ | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' -e 's/$/:/' \ - >> $depfile + >> "$depfile" else # The sourcefile does not contain any dependencies, so just # store a dummy comment line, to avoid errors with the Makefile @@ -209,40 +234,51 @@ rm -f "$tmpdepfile" ;; +xlc) + # This case exists only to let depend.m4 do its work. It works by + # looking at the text of this script. This case will never be run, + # since it is checked for above. + exit 1 + ;; + aix) # The C for AIX Compiler uses -M and outputs the dependencies # in a .u file. In older versions, this file always lives in the - # current directory. Also, the AIX compiler puts `$object:' at the + # current directory. Also, the AIX compiler puts '$object:' at the # start of each line; $object doesn't have directory information. # Version 6 uses the directory in both cases. 
- stripped=`echo "$object" | sed 's/\(.*\)\..*$/\1/'` - tmpdepfile="$stripped.u" + dir=`echo "$object" | sed -e 's|/[^/]*$|/|'` + test "x$dir" = "x$object" && dir= + base=`echo "$object" | sed -e 's|^.*/||' -e 's/\.o$//' -e 's/\.lo$//'` if test "$libtool" = yes; then + tmpdepfile1=$dir$base.u + tmpdepfile2=$base.u + tmpdepfile3=$dir.libs/$base.u "$@" -Wc,-M else + tmpdepfile1=$dir$base.u + tmpdepfile2=$dir$base.u + tmpdepfile3=$dir$base.u "$@" -M fi stat=$? - if test -f "$tmpdepfile"; then : - else - stripped=`echo "$stripped" | sed 's,^.*/,,'` - tmpdepfile="$stripped.u" - fi - if test $stat -eq 0; then : else - rm -f "$tmpdepfile" + rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" exit $stat fi + for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" + do + test -f "$tmpdepfile" && break + done if test -f "$tmpdepfile"; then - outname="$stripped.o" - # Each line is of the form `foo.o: dependent.h'. + # Each line is of the form 'foo.o: dependent.h'. # Do two passes, one to just change these to - # `$object: dependent.h' and one to simply `dependent.h:'. - sed -e "s,^$outname:,$object :," < "$tmpdepfile" > "$depfile" - sed -e "s,^$outname: \(.*\)$,\1:," < "$tmpdepfile" >> "$depfile" + # '$object: dependent.h' and one to simply 'dependent.h:'. + sed -e "s,^.*\.[a-z]*:,$object:," < "$tmpdepfile" > "$depfile" + sed -e 's,^.*\.[a-z]*:['"$tab"' ]*,,' -e 's,$,:,' < "$tmpdepfile" >> "$depfile" else # The sourcefile does not contain any dependencies, so just # store a dummy comment line, to avoid errors with the Makefile @@ -253,23 +289,26 @@ ;; icc) - # Intel's C compiler understands `-MD -MF file'. However on - # icc -MD -MF foo.d -c -o sub/foo.o sub/foo.c + # Intel's C compiler anf tcc (Tiny C Compiler) understand '-MD -MF file'. + # However on + # $CC -MD -MF foo.d -c -o sub/foo.o sub/foo.c # ICC 7.0 will fill foo.d with something like # foo.o: sub/foo.c # foo.o: sub/foo.h - # which is wrong. We want: + # which is wrong. 
We want # sub/foo.o: sub/foo.c # sub/foo.o: sub/foo.h # sub/foo.c: # sub/foo.h: # ICC 7.1 will output # foo.o: sub/foo.c sub/foo.h - # and will wrap long lines using \ : + # and will wrap long lines using '\': # foo.o: sub/foo.c ... \ # sub/foo.h ... \ # ... - + # tcc 0.9.26 (FIXME still under development at the moment of writing) + # will emit a similar output, but also prepend the continuation lines + # with horizontal tabulation characters. "$@" -MD -MF "$tmpdepfile" stat=$? if test $stat -eq 0; then : @@ -278,15 +317,21 @@ exit $stat fi rm -f "$depfile" - # Each line is of the form `foo.o: dependent.h', - # or `foo.o: dep1.h dep2.h \', or ` dep3.h dep4.h \'. + # Each line is of the form 'foo.o: dependent.h', + # or 'foo.o: dep1.h dep2.h \', or ' dep3.h dep4.h \'. # Do two passes, one to just change these to - # `$object: dependent.h' and one to simply `dependent.h:'. - sed "s,^[^:]*:,$object :," < "$tmpdepfile" > "$depfile" - # Some versions of the HPUX 10.20 sed can't process this invocation - # correctly. Breaking it into two sed invocations is a workaround. - sed 's,^[^:]*: \(.*\)$,\1,;s/^\\$//;/^$/d;/:$/d' < "$tmpdepfile" | - sed -e 's/$/ :/' >> "$depfile" + # '$object: dependent.h' and one to simply 'dependent.h:'. + sed -e "s/^[ $tab][ $tab]*/ /" -e "s,^[^:]*:,$object :," \ + < "$tmpdepfile" > "$depfile" + sed ' + s/[ '"$tab"'][ '"$tab"']*/ /g + s/^ *// + s/ *\\*$// + s/^[^:]*: *// + /^$/d + /:$/d + s/$/ :/ + ' < "$tmpdepfile" >> "$depfile" rm -f "$tmpdepfile" ;; @@ -322,8 +367,13 @@ done if test -f "$tmpdepfile"; then sed -e "s,^.*\.[a-z]*:,$object:," "$tmpdepfile" > "$depfile" - # Add `dependent.h:' lines. - sed -ne '2,${; s/^ *//; s/ \\*$//; s/$/:/; p;}' "$tmpdepfile" >> "$depfile" + # Add 'dependent.h:' lines. + sed -ne '2,${ + s/^ *// + s/ \\*$// + s/$/:/ + p + }' "$tmpdepfile" >> "$depfile" else echo "#dummy" > "$depfile" fi @@ -332,9 +382,9 @@ tru64) # The Tru64 compiler uses -MD to generate dependencies as a side - # effect. `cc -MD -o foo.o ...' 
puts the dependencies into `foo.o.d'. + # effect. 'cc -MD -o foo.o ...' puts the dependencies into 'foo.o.d'. # At least on Alpha/Redhat 6.1, Compaq CCC V6.2-504 seems to put - # dependencies in `foo.d' instead, so we check for that too. + # dependencies in 'foo.d' instead, so we check for that too. # Subdirectories are respected. dir=`echo "$object" | sed -e 's|/[^/]*$|/|'` test "x$dir" = "x$object" && dir= @@ -380,14 +430,59 @@ done if test -f "$tmpdepfile"; then sed -e "s,^.*\.[a-z]*:,$object:," < "$tmpdepfile" > "$depfile" - # That's a tab and a space in the []. - sed -e 's,^.*\.[a-z]*:[ ]*,,' -e 's,$,:,' < "$tmpdepfile" >> "$depfile" + sed -e 's,^.*\.[a-z]*:['"$tab"' ]*,,' -e 's,$,:,' < "$tmpdepfile" >> "$depfile" else echo "#dummy" > "$depfile" fi rm -f "$tmpdepfile" ;; +msvc7) + if test "$libtool" = yes; then + showIncludes=-Wc,-showIncludes + else + showIncludes=-showIncludes + fi + "$@" $showIncludes > "$tmpdepfile" + stat=$? + grep -v '^Note: including file: ' "$tmpdepfile" + if test "$stat" = 0; then : + else + rm -f "$tmpdepfile" + exit $stat + fi + rm -f "$depfile" + echo "$object : \\" > "$depfile" + # The first sed program below extracts the file names and escapes + # backslashes for cygpath. The second sed program outputs the file + # name when reading, but also accumulates all include files in the + # hold buffer in order to output them again at the end. This only + # works with sed implementations that can handle large buffers. + sed < "$tmpdepfile" -n ' +/^Note: including file: *\(.*\)/ { + s//\1/ + s/\\/\\\\/g + p +}' | $cygpath_u | sort -u | sed -n ' +s/ /\\ /g +s/\(.*\)/'"$tab"'\1 \\/p +s/.\(.*\) \\/\1:/ +H +$ { + s/.*/'"$tab"'/ + G + p +}' >> "$depfile" + rm -f "$tmpdepfile" + ;; + +msvc7msys) + # This case exists only to let depend.m4 do its work. It works by + # looking at the text of this script. This case will never be run, + # since it is checked for above. 
+ exit 1 + ;; + #nosideeffect) # This comment above is used by automake to tell side-effect # dependency tracking mechanisms from slower ones. @@ -399,13 +494,13 @@ # Remove the call to Libtool. if test "$libtool" = yes; then - while test $1 != '--mode=compile'; do + while test "X$1" != 'X--mode=compile'; do shift done shift fi - # Remove `-o $object'. + # Remove '-o $object'. IFS=" " for arg do @@ -425,15 +520,14 @@ done test -z "$dashmflag" && dashmflag=-M - # Require at least two characters before searching for `:' + # Require at least two characters before searching for ':' # in the target name. This is to cope with DOS-style filenames: - # a dependency such as `c:/foo/bar' could be seen as target `c' otherwise. + # a dependency such as 'c:/foo/bar' could be seen as target 'c' otherwise. "$@" $dashmflag | - sed 's:^[ ]*[^: ][^:][^:]*\:[ ]*:'"$object"'\: :' > "$tmpdepfile" + sed 's:^['"$tab"' ]*[^:'"$tab"' ][^:][^:]*\:['"$tab"' ]*:'"$object"'\: :' > "$tmpdepfile" rm -f "$depfile" cat < "$tmpdepfile" > "$depfile" - tr ' ' ' -' < "$tmpdepfile" | \ + tr ' ' "$nl" < "$tmpdepfile" | \ ## Some versions of the HPUX 10.20 sed can't process this invocation ## correctly. Breaking it into two sed invocations is a workaround. sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' | sed -e 's/$/ :/' >> "$depfile" @@ -450,38 +544,46 @@ "$@" || exit $? # Remove any Libtool call if test "$libtool" = yes; then - while test $1 != '--mode=compile'; do + while test "X$1" != 'X--mode=compile'; do shift done shift fi # X makedepend shift - cleared=no - for arg in "$@"; do + cleared=no eat=no + for arg + do case $cleared in no) set ""; shift cleared=yes ;; esac + if test $eat = yes; then + eat=no + continue + fi case "$arg" in -D*|-I*) set fnord "$@" "$arg"; shift ;; # Strip any option that makedepend may not understand. Remove # the object too, otherwise makedepend will parse it as a source file. 
+ -arch) + eat=yes ;; -*|$object) ;; *) set fnord "$@" "$arg"; shift ;; esac done - obj_suffix="`echo $object | sed 's/^.*\././'`" + obj_suffix=`echo "$object" | sed 's/^.*\././'` touch "$tmpdepfile" ${MAKEDEPEND-makedepend} -o"$obj_suffix" -f"$tmpdepfile" "$@" rm -f "$depfile" - cat < "$tmpdepfile" > "$depfile" - sed '1,2d' "$tmpdepfile" | tr ' ' ' -' | \ + # makedepend may prepend the VPATH from the source file name to the object. + # No need to regex-escape $object, excess matching of '.' is harmless. + sed "s|^.*\($object *:\)|\1|" "$tmpdepfile" > "$depfile" + sed '1,2d' "$tmpdepfile" | tr ' ' "$nl" | \ ## Some versions of the HPUX 10.20 sed can't process this invocation ## correctly. Breaking it into two sed invocations is a workaround. sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' | sed -e 's/$/ :/' >> "$depfile" @@ -495,13 +597,13 @@ # Remove the call to Libtool. if test "$libtool" = yes; then - while test $1 != '--mode=compile'; do + while test "X$1" != 'X--mode=compile'; do shift done shift fi - # Remove `-o $object'. + # Remove '-o $object'. IFS=" " for arg do @@ -533,13 +635,27 @@ msvisualcpp) # Important note: in order to support this mode, a compiler *must* - # always write the preprocessed file to stdout, regardless of -o, - # because we must use -o when running libtool. + # always write the preprocessed file to stdout. "$@" || exit $? + + # Remove the call to Libtool. + if test "$libtool" = yes; then + while test "X$1" != 'X--mode=compile'; do + shift + done + shift + fi + IFS=" " for arg do case "$arg" in + -o) + shift + ;; + $object) + shift + ;; "-Gm"|"/Gm"|"-Gi"|"/Gi"|"-ZI"|"/ZI") set fnord "$@" shift @@ -552,16 +668,23 @@ ;; esac done - "$@" -E | - sed -n '/^#line [0-9][0-9]* "\([^"]*\)"/ s::echo "`cygpath -u \\"\1\\"`":p' | sort | uniq > "$tmpdepfile" + "$@" -E 2>/dev/null | + sed -n '/^#line [0-9][0-9]* "\([^"]*\)"/ s::\1:p' | $cygpath_u | sort -u > "$tmpdepfile" rm -f "$depfile" echo "$object : \\" > "$depfile" - . 
"$tmpdepfile" | sed 's% %\\ %g' | sed -n '/^\(.*\)$/ s:: \1 \\:p' >> "$depfile" - echo " " >> "$depfile" - . "$tmpdepfile" | sed 's% %\\ %g' | sed -n '/^\(.*\)$/ s::\1\::p' >> "$depfile" + sed < "$tmpdepfile" -n -e 's% %\\ %g' -e '/^\(.*\)$/ s::'"$tab"'\1 \\:p' >> "$depfile" + echo "$tab" >> "$depfile" + sed < "$tmpdepfile" -n -e 's% %\\ %g' -e '/^\(.*\)$/ s::\1\::p' >> "$depfile" rm -f "$tmpdepfile" ;; +msvcmsys) + # This case exists only to let depend.m4 do its work. It works by + # looking at the text of this script. This case will never be run, + # since it is checked for above. + exit 1 + ;; + none) exec "$@" ;; @@ -580,5 +703,6 @@ # eval: (add-hook 'write-file-hooks 'time-stamp) # time-stamp-start: "scriptversion=" # time-stamp-format: "%:y-%02m-%02d.%02H" -# time-stamp-end: "$" +# time-stamp-time-zone: "UTC" +# time-stamp-end: "; # UTC" # End: diff -Nru couchdb-1.2.0/build-aux/dist-error couchdb-1.4.0~rc.1/build-aux/dist-error --- couchdb-1.2.0/build-aux/dist-error 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/build-aux/dist-error 2013-08-23 10:57:21.000000000 -0400 @@ -0,0 +1,28 @@ +#!/bin/sh -e + +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. + +# This script is called by the build system and is used to provide an error +# about missing or empty files. Some files are optional, and will be built when +# the environment allows. But these files are required for distribution. 
+ +cat << EOF +ERROR: This file is missing or incomplete: + + $1 + + This file is optional at build and install time, + but is required when preparing a distribution. +EOF + +exit 1 diff -Nru couchdb-1.2.0/build-aux/install-sh couchdb-1.4.0~rc.1/build-aux/install-sh --- couchdb-1.2.0/build-aux/install-sh 2011-11-03 17:56:27.000000000 -0400 +++ couchdb-1.4.0~rc.1/build-aux/install-sh 2012-09-03 11:10:15.000000000 -0400 @@ -1,7 +1,7 @@ #!/bin/sh # install - install a program, script, or datafile -scriptversion=2006-10-14.15 +scriptversion=2011-01-19.21; # UTC # This originates from X11R5 (mit/util/scripts/install.sh), which was # later released in X11R6 (xc/config/util/install.sh) with the @@ -48,7 +48,7 @@ # set DOITPROG to echo to test this script # Don't use :- since 4.3BSD and earlier shells don't like it. -doit="${DOITPROG-}" +doit=${DOITPROG-} if test -z "$doit"; then doit_exec=exec else @@ -58,34 +58,49 @@ # Put in absolute file names if you don't have them in your path; # or use environment vars. -mvprog="${MVPROG-mv}" -cpprog="${CPPROG-cp}" -chmodprog="${CHMODPROG-chmod}" -chownprog="${CHOWNPROG-chown}" -chgrpprog="${CHGRPPROG-chgrp}" -stripprog="${STRIPPROG-strip}" -rmprog="${RMPROG-rm}" -mkdirprog="${MKDIRPROG-mkdir}" +chgrpprog=${CHGRPPROG-chgrp} +chmodprog=${CHMODPROG-chmod} +chownprog=${CHOWNPROG-chown} +cmpprog=${CMPPROG-cmp} +cpprog=${CPPROG-cp} +mkdirprog=${MKDIRPROG-mkdir} +mvprog=${MVPROG-mv} +rmprog=${RMPROG-rm} +stripprog=${STRIPPROG-strip} + +posix_glob='?' +initialize_posix_glob=' + test "$posix_glob" != "?" || { + if (set -f) 2>/dev/null; then + posix_glob= + else + posix_glob=: + fi + } +' -posix_glob= posix_mkdir= # Desired mode of installed file. mode=0755 +chgrpcmd= chmodcmd=$chmodprog chowncmd= -chgrpcmd= -stripcmd= +mvcmd=$mvprog rmcmd="$rmprog -f" -mvcmd="$mvprog" +stripcmd= + src= dst= dir_arg= -dstarg= +dst_arg= + +copy_on_change=false no_target_directory= -usage="Usage: $0 [OPTION]... 
[-T] SRCFILE DSTFILE +usage="\ +Usage: $0 [OPTION]... [-T] SRCFILE DSTFILE or: $0 [OPTION]... SRCFILES... DIRECTORY or: $0 [OPTION]... -t DIRECTORY SRCFILES... or: $0 [OPTION]... -d DIRECTORIES... @@ -95,65 +110,59 @@ In the 4th, create DIRECTORIES. Options: --c (ignored) --d create directories instead of installing files. --g GROUP $chgrpprog installed files to GROUP. --m MODE $chmodprog installed files to MODE. --o USER $chownprog installed files to USER. --s $stripprog installed files. --t DIRECTORY install into DIRECTORY. --T report an error if DSTFILE is a directory. ---help display this help and exit. ---version display version info and exit. + --help display this help and exit. + --version display version info and exit. + + -c (ignored) + -C install only if different (preserve the last data modification time) + -d create directories instead of installing files. + -g GROUP $chgrpprog installed files to GROUP. + -m MODE $chmodprog installed files to MODE. + -o USER $chownprog installed files to USER. + -s $stripprog installed files. + -t DIRECTORY install into DIRECTORY. + -T report an error if DSTFILE is a directory. 
Environment variables override the default commands: - CHGRPPROG CHMODPROG CHOWNPROG CPPROG MKDIRPROG MVPROG RMPROG STRIPPROG + CHGRPPROG CHMODPROG CHOWNPROG CMPPROG CPPROG MKDIRPROG MVPROG + RMPROG STRIPPROG " while test $# -ne 0; do case $1 in - -c) shift - continue;; + -c) ;; + + -C) copy_on_change=true;; - -d) dir_arg=true - shift - continue;; + -d) dir_arg=true;; -g) chgrpcmd="$chgrpprog $2" - shift - shift - continue;; + shift;; --help) echo "$usage"; exit $?;; -m) mode=$2 - shift - shift case $mode in *' '* | *' '* | *' '* | *'*'* | *'?'* | *'['*) echo "$0: invalid mode: $mode" >&2 exit 1;; esac - continue;; + shift;; -o) chowncmd="$chownprog $2" - shift - shift - continue;; - - -s) stripcmd=$stripprog - shift - continue;; - - -t) dstarg=$2 - shift - shift - continue;; - - -T) no_target_directory=true - shift - continue;; + shift;; + + -s) stripcmd=$stripprog;; + + -t) dst_arg=$2 + # Protect names problematic for `test' and other utilities. + case $dst_arg in + -* | [=\(\)!]) dst_arg=./$dst_arg;; + esac + shift;; + + -T) no_target_directory=true;; --version) echo "$0 $scriptversion"; exit $?;; @@ -165,21 +174,26 @@ *) break;; esac + shift done -if test $# -ne 0 && test -z "$dir_arg$dstarg"; then +if test $# -ne 0 && test -z "$dir_arg$dst_arg"; then # When -d is used, all remaining arguments are directories to create. # When -t is used, the destination is already specified. # Otherwise, the last argument is the destination. Remove it from $@. for arg do - if test -n "$dstarg"; then + if test -n "$dst_arg"; then # $@ is not empty: it contains at least $arg. - set fnord "$@" "$dstarg" + set fnord "$@" "$dst_arg" shift # fnord fi shift # arg - dstarg=$arg + dst_arg=$arg + # Protect names problematic for `test' and other utilities. 
+ case $dst_arg in + -* | [=\(\)!]) dst_arg=./$dst_arg;; + esac done fi @@ -194,7 +208,11 @@ fi if test -z "$dir_arg"; then - trap '(exit $?); exit' 1 2 13 15 + do_exit='(exit $ret); exit $ret' + trap "ret=129; $do_exit" 1 + trap "ret=130; $do_exit" 2 + trap "ret=141; $do_exit" 13 + trap "ret=143; $do_exit" 15 # Set umask so as not to create temps with too-generous modes. # However, 'strip' requires both read and write access to temps. @@ -222,9 +240,9 @@ for src do - # Protect names starting with `-'. + # Protect names problematic for `test' and other utilities. case $src in - -*) src=./$src ;; + -* | [=\(\)!]) src=./$src;; esac if test -n "$dir_arg"; then @@ -242,22 +260,17 @@ exit 1 fi - if test -z "$dstarg"; then + if test -z "$dst_arg"; then echo "$0: no destination specified." >&2 exit 1 fi - - dst=$dstarg - # Protect names starting with `-'. - case $dst in - -*) dst=./$dst ;; - esac + dst=$dst_arg # If destination is a directory, append the input filename; won't work # if double slashes aren't ignored. if test -d "$dst"; then if test -n "$no_target_directory"; then - echo "$0: $dstarg: Is a directory" >&2 + echo "$0: $dst_arg: Is a directory" >&2 exit 1 fi dstdir=$dst @@ -378,33 +391,26 @@ # directory the slow way, step by step, checking for races as we go. case $dstdir in - /*) prefix=/ ;; - -*) prefix=./ ;; - *) prefix= ;; + /*) prefix='/';; + [-=\(\)!]*) prefix='./';; + *) prefix='';; esac - case $posix_glob in - '') - if (set -f) 2>/dev/null; then - posix_glob=true - else - posix_glob=false - fi ;; - esac + eval "$initialize_posix_glob" oIFS=$IFS IFS=/ - $posix_glob && set -f + $posix_glob set -f set fnord $dstdir shift - $posix_glob && set +f + $posix_glob set +f IFS=$oIFS prefixes= for d do - test -z "$d" && continue + test X"$d" = X && continue prefix=$prefix$d if test -d "$prefix"; then @@ -459,41 +465,54 @@ # ignore errors from any of these, just make sure not to ignore # errors from the above "$doit $cpprog $src $dsttmp" command. 
# - { test -z "$chowncmd" || $doit $chowncmd "$dsttmp"; } \ - && { test -z "$chgrpcmd" || $doit $chgrpcmd "$dsttmp"; } \ - && { test -z "$stripcmd" || $doit $stripcmd "$dsttmp"; } \ - && { test -z "$chmodcmd" || $doit $chmodcmd $mode "$dsttmp"; } && - - # Now rename the file to the real destination. - { $doit $mvcmd -f "$dsttmp" "$dst" 2>/dev/null \ - || { - # The rename failed, perhaps because mv can't rename something else - # to itself, or perhaps because mv is so ancient that it does not - # support -f. - - # Now remove or move aside any old file at destination location. - # We try this two ways since rm can't unlink itself on some - # systems and the destination file might be busy for other - # reasons. In this case, the final cleanup might fail but the new - # file should still install successfully. - { - if test -f "$dst"; then - $doit $rmcmd -f "$dst" 2>/dev/null \ - || { $doit $mvcmd -f "$dst" "$rmtmp" 2>/dev/null \ - && { $doit $rmcmd -f "$rmtmp" 2>/dev/null; :; }; }\ - || { - echo "$0: cannot unlink or rename $dst" >&2 - (exit 1); exit 1 - } - else - : - fi - } && - - # Now rename the file to the real destination. - $doit $mvcmd "$dsttmp" "$dst" - } - } || exit 1 + { test -z "$chowncmd" || $doit $chowncmd "$dsttmp"; } && + { test -z "$chgrpcmd" || $doit $chgrpcmd "$dsttmp"; } && + { test -z "$stripcmd" || $doit $stripcmd "$dsttmp"; } && + { test -z "$chmodcmd" || $doit $chmodcmd $mode "$dsttmp"; } && + + # If -C, don't bother to copy if it wouldn't change the file. + if $copy_on_change && + old=`LC_ALL=C ls -dlL "$dst" 2>/dev/null` && + new=`LC_ALL=C ls -dlL "$dsttmp" 2>/dev/null` && + + eval "$initialize_posix_glob" && + $posix_glob set -f && + set X $old && old=:$2:$4:$5:$6 && + set X $new && new=:$2:$4:$5:$6 && + $posix_glob set +f && + + test "$old" = "$new" && + $cmpprog "$dst" "$dsttmp" >/dev/null 2>&1 + then + rm -f "$dsttmp" + else + # Rename the file to the real destination. 
+ $doit $mvcmd -f "$dsttmp" "$dst" 2>/dev/null || + + # The rename failed, perhaps because mv can't rename something else + # to itself, or perhaps because mv is so ancient that it does not + # support -f. + { + # Now remove or move aside any old file at destination location. + # We try this two ways since rm can't unlink itself on some + # systems and the destination file might be busy for other + # reasons. In this case, the final cleanup might fail but the new + # file should still install successfully. + { + test ! -f "$dst" || + $doit $rmcmd -f "$dst" 2>/dev/null || + { $doit $mvcmd -f "$dst" "$rmtmp" 2>/dev/null && + { $doit $rmcmd -f "$rmtmp" 2>/dev/null; :; } + } || + { echo "$0: cannot unlink or rename $dst" >&2 + (exit 1); exit 1 + } + } && + + # Now rename the file to the real destination. + $doit $mvcmd "$dsttmp" "$dst" + } + fi || exit 1 trap '' 0 fi @@ -503,5 +522,6 @@ # eval: (add-hook 'write-file-hooks 'time-stamp) # time-stamp-start: "scriptversion=" # time-stamp-format: "%:y-%02m-%02d.%02H" -# time-stamp-end: "$" +# time-stamp-time-zone: "UTC" +# time-stamp-end: "; # UTC" # End: diff -Nru couchdb-1.2.0/build-aux/ltmain.sh couchdb-1.4.0~rc.1/build-aux/ltmain.sh --- couchdb-1.2.0/build-aux/ltmain.sh 2012-03-29 17:05:31.000000000 -0400 +++ couchdb-1.4.0~rc.1/build-aux/ltmain.sh 2013-08-23 10:57:22.000000000 -0400 @@ -1,6 +1,5 @@ -# Generated from ltmain.m4sh. -# libtool (GNU libtool) 2.2.10 +# libtool (GNU libtool) 2.4 # Written by Gordon Matzigkeit , 1996 # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, @@ -70,17 +69,19 @@ # compiler: $LTCC # compiler flags: $LTCFLAGS # linker: $LD (gnu? $with_gnu_ld) -# $progname: (GNU libtool) 2.2.10 +# $progname: (GNU libtool) 2.4 # automake: $automake_version # autoconf: $autoconf_version # # Report bugs to . +# GNU libtool home page: . +# General help using GNU software: . 
PROGRAM=libtool PACKAGE=libtool -VERSION=2.2.10 +VERSION=2.4 TIMESTAMP="" -package_revision=1.3175 +package_revision=1.3293 # Be Bourne compatible if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then @@ -135,15 +136,15 @@ : ${CP="cp -f"} test "${ECHO+set}" = set || ECHO=${as_echo-'printf %s\n'} -: ${EGREP="/usr/bin/grep -E"} -: ${FGREP="/usr/bin/grep -F"} -: ${GREP="/usr/bin/grep"} +: ${EGREP="grep -E"} +: ${FGREP="grep -F"} +: ${GREP="grep"} : ${LN_S="ln -s"} : ${MAKE="make"} : ${MKDIR="mkdir"} : ${MV="mv -f"} : ${RM="rm -f"} -: ${SED="/usr/bin/sed"} +: ${SED="sed"} : ${SHELL="${CONFIG_SHELL-/bin/sh}"} : ${Xsed="$SED -e 1s/^X//"} @@ -163,6 +164,27 @@ dirname="s,/[^/]*$,," basename="s,^.*/,," +# func_dirname file append nondir_replacement +# Compute the dirname of FILE. If nonempty, add APPEND to the result, +# otherwise set result to NONDIR_REPLACEMENT. +func_dirname () +{ + func_dirname_result=`$ECHO "${1}" | $SED "$dirname"` + if test "X$func_dirname_result" = "X${1}"; then + func_dirname_result="${3}" + else + func_dirname_result="$func_dirname_result${2}" + fi +} # func_dirname may be replaced by extended shell implementation + + +# func_basename file +func_basename () +{ + func_basename_result=`$ECHO "${1}" | $SED "$basename"` +} # func_basename may be replaced by extended shell implementation + + # func_dirname_and_basename file append nondir_replacement # perform func_basename and func_dirname in a single function # call: @@ -177,17 +199,31 @@ # those functions but instead duplicate the functionality here. func_dirname_and_basename () { - # Extract subdirectory from the argument. - func_dirname_result=`$ECHO "${1}" | $SED -e "$dirname"` - if test "X$func_dirname_result" = "X${1}"; then - func_dirname_result="${3}" - else - func_dirname_result="$func_dirname_result${2}" - fi - func_basename_result=`$ECHO "${1}" | $SED -e "$basename"` -} + # Extract subdirectory from the argument. 
+ func_dirname_result=`$ECHO "${1}" | $SED -e "$dirname"` + if test "X$func_dirname_result" = "X${1}"; then + func_dirname_result="${3}" + else + func_dirname_result="$func_dirname_result${2}" + fi + func_basename_result=`$ECHO "${1}" | $SED -e "$basename"` +} # func_dirname_and_basename may be replaced by extended shell implementation + + +# func_stripname prefix suffix name +# strip PREFIX and SUFFIX off of NAME. +# PREFIX and SUFFIX must not contain globbing or regex special +# characters, hashes, percent signs, but SUFFIX may contain a leading +# dot (in which case that matches only a dot). +# func_strip_suffix prefix name +func_stripname () +{ + case ${2} in + .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; + *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; + esac +} # func_stripname may be replaced by extended shell implementation -# Generated shell functions inserted here. # These SED scripts presuppose an absolute path with a trailing slash. pathcar='s,^/\([^/]*\).*$,\1,' @@ -370,6 +406,15 @@ # Same as above, but do not quote variable references. double_quote_subst='s/\(["`\\]\)/\\\1/g' +# Sed substitution that turns a string into a regex matching for the +# string literally. +sed_make_literal_regex='s,[].[^$\\*\/],\\&,g' + +# Sed substitution that converts a w32 file name or path +# which contains forward slashes, into one that contains +# (escaped) backslashes. A very naive implementation. +lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g' + # Re-`\' parameter expansions in output of double_quote_subst that were # `\'-ed in input to the same. If an odd number of `\' preceded a '$' # in input to double_quote_subst, that '$' was protected from expansion. @@ -398,7 +443,7 @@ # name if it has been set yet. func_echo () { - $ECHO "$progname${mode+: }$mode: $*" + $ECHO "$progname: ${opt_mode+$opt_mode: }$*" } # func_verbose arg... 
@@ -424,14 +469,14 @@ # Echo program name prefixed message to standard error. func_error () { - $ECHO "$progname${mode+: }$mode: "${1+"$@"} 1>&2 + $ECHO "$progname: ${opt_mode+$opt_mode: }"${1+"$@"} 1>&2 } # func_warning arg... # Echo program name prefixed warning message to standard error. func_warning () { - $opt_warning && $ECHO "$progname${mode+: }$mode: warning: "${1+"$@"} 1>&2 + $opt_warning && $ECHO "$progname: ${opt_mode+$opt_mode: }warning: "${1+"$@"} 1>&2 # bash bug again: : @@ -650,11 +695,30 @@ fi } +# func_tr_sh +# Turn $1 into a string suitable for a shell variable name. +# Result is stored in $func_tr_sh_result. All characters +# not in the set a-zA-Z0-9_ are replaced with '_'. Further, +# if $1 begins with a digit, a '_' is prepended as well. +func_tr_sh () +{ + case $1 in + [0-9]* | *[!a-zA-Z0-9_]*) + func_tr_sh_result=`$ECHO "$1" | $SED 's/^\([0-9]\)/_\1/; s/[^a-zA-Z0-9_]/_/g'` + ;; + * ) + func_tr_sh_result=$1 + ;; + esac +} + # func_version # Echo version message to standard output and exit. func_version () { + $opt_debug + $SED -n '/(C)/!b go :more /\./!{ @@ -676,6 +740,8 @@ # Echo short help message to standard output and exit. func_usage () { + $opt_debug + $SED -n '/^# Usage:/,/^# *.*--help/ { s/^# // s/^# *$// @@ -692,7 +758,10 @@ # unless 'noexit' is passed as argument. func_help () { + $opt_debug + $SED -n '/^# Usage:/,/# Report bugs to/ { + :print s/^# // s/^# *$// s*\$progname*'$progname'* @@ -705,7 +774,11 @@ s/\$automake_version/'"`(automake --version) 2>/dev/null |$SED 1q`"'/ s/\$autoconf_version/'"`(autoconf --version) 2>/dev/null |$SED 1q`"'/ p - }' < "$progpath" + d + } + /^# .* home page:/b print + /^# General help using/b print + ' < "$progpath" ret=$? if test -z "$1"; then exit $ret @@ -717,12 +790,39 @@ # exit_cmd. func_missing_arg () { + $opt_debug + func_error "missing argument for $1." 
exit_cmd=exit } -exit_cmd=: +# func_split_short_opt shortopt +# Set func_split_short_opt_name and func_split_short_opt_arg shell +# variables after splitting SHORTOPT after the 2nd character. +func_split_short_opt () +{ + my_sed_short_opt='1s/^\(..\).*$/\1/;q' + my_sed_short_rest='1s/^..\(.*\)$/\1/;q' + + func_split_short_opt_name=`$ECHO "$1" | $SED "$my_sed_short_opt"` + func_split_short_opt_arg=`$ECHO "$1" | $SED "$my_sed_short_rest"` +} # func_split_short_opt may be replaced by extended shell implementation + + +# func_split_long_opt longopt +# Set func_split_long_opt_name and func_split_long_opt_arg shell +# variables after splitting LONGOPT at the `=' sign. +func_split_long_opt () +{ + my_sed_long_opt='1s/^\(--[^=]*\)=.*/\1/;q' + my_sed_long_arg='1s/^--[^=]*=//' + + func_split_long_opt_name=`$ECHO "$1" | $SED "$my_sed_long_opt"` + func_split_long_opt_arg=`$ECHO "$1" | $SED "$my_sed_long_arg"` +} # func_split_long_opt may be replaced by extended shell implementation + +exit_cmd=: @@ -732,25 +832,64 @@ magic_exe="%%%MAGIC EXE variable%%%" # Global variables. -# $mode is unset nonopt= -execute_dlfiles= preserve_args= lo2o="s/\\.lo\$/.${objext}/" o2lo="s/\\.${objext}\$/.lo/" extracted_archives= extracted_serial=0 -opt_dry_run=false -opt_duplicate_deps=false -opt_silent=false -opt_debug=: - # If this variable is set in any of the actions, the command in it # will be execed at the end. This prevents here-documents from being # left over by shells. exec_cmd= +# func_append var value +# Append VALUE to the end of shell variable VAR. +func_append () +{ + eval "${1}=\$${1}\${2}" +} # func_append may be replaced by extended shell implementation + +# func_append_quoted var value +# Quote VALUE and append to the end of shell variable VAR, separated +# by a space. 
+func_append_quoted () +{ + func_quote_for_eval "${2}" + eval "${1}=\$${1}\\ \$func_quote_for_eval_result" +} # func_append_quoted may be replaced by extended shell implementation + + +# func_arith arithmetic-term... +func_arith () +{ + func_arith_result=`expr "${@}"` +} # func_arith may be replaced by extended shell implementation + + +# func_len string +# STRING may not start with a hyphen. +func_len () +{ + func_len_result=`expr "${1}" : ".*" 2>/dev/null || echo $max_cmd_len` +} # func_len may be replaced by extended shell implementation + + +# func_lo2o object +func_lo2o () +{ + func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"` +} # func_lo2o may be replaced by extended shell implementation + + +# func_xform libobj-or-source +func_xform () +{ + func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'` +} # func_xform may be replaced by extended shell implementation + + # func_fatal_configuration arg... # Echo program name prefixed message to standard error, followed by # a configuration failure hint, and exit. @@ -840,129 +979,204 @@ esac } -# Parse options once, thoroughly. This comes as soon as possible in -# the script to make things like `libtool --version' happen quickly. +# func_check_version_match +# Ensure that we are using m4 macros, and libtool script from the same +# release of libtool. +func_check_version_match () { + if test "$package_revision" != "$macro_revision"; then + if test "$VERSION" != "$macro_version"; then + if test -z "$macro_version"; then + cat >&2 <<_LT_EOF +$progname: Version mismatch error. This is $PACKAGE $VERSION, but the +$progname: definition of this LT_INIT comes from an older release. +$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION +$progname: and run autoconf again. +_LT_EOF + else + cat >&2 <<_LT_EOF +$progname: Version mismatch error. This is $PACKAGE $VERSION, but the +$progname: definition of this LT_INIT comes from $PACKAGE $macro_version. 
+$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION +$progname: and run autoconf again. +_LT_EOF + fi + else + cat >&2 <<_LT_EOF +$progname: Version mismatch error. This is $PACKAGE $VERSION, revision $package_revision, +$progname: but the definition of this LT_INIT comes from revision $macro_revision. +$progname: You should recreate aclocal.m4 with macros from revision $package_revision +$progname: of $PACKAGE $VERSION and run autoconf again. +_LT_EOF + fi + + exit $EXIT_MISMATCH + fi +} + + +# Shorthand for --mode=foo, only valid as the first argument +case $1 in +clean|clea|cle|cl) + shift; set dummy --mode clean ${1+"$@"}; shift + ;; +compile|compil|compi|comp|com|co|c) + shift; set dummy --mode compile ${1+"$@"}; shift + ;; +execute|execut|execu|exec|exe|ex|e) + shift; set dummy --mode execute ${1+"$@"}; shift + ;; +finish|finis|fini|fin|fi|f) + shift; set dummy --mode finish ${1+"$@"}; shift + ;; +install|instal|insta|inst|ins|in|i) + shift; set dummy --mode install ${1+"$@"}; shift + ;; +link|lin|li|l) + shift; set dummy --mode link ${1+"$@"}; shift + ;; +uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u) + shift; set dummy --mode uninstall ${1+"$@"}; shift + ;; +esac + + + +# Option defaults: +opt_debug=: +opt_dry_run=false +opt_config=false +opt_preserve_dup_deps=false +opt_features=false +opt_finish=false +opt_help=false +opt_help_all=false +opt_silent=: +opt_verbose=: +opt_silent=false +opt_verbose=false - # Shorthand for --mode=foo, only valid as the first argument - case $1 in - clean|clea|cle|cl) - shift; set dummy --mode clean ${1+"$@"}; shift - ;; - compile|compil|compi|comp|com|co|c) - shift; set dummy --mode compile ${1+"$@"}; shift - ;; - execute|execut|execu|exec|exe|ex|e) - shift; set dummy --mode execute ${1+"$@"}; shift - ;; - finish|finis|fini|fin|fi|f) - shift; set dummy --mode finish ${1+"$@"}; shift - ;; - install|instal|insta|inst|ins|in|i) - shift; set dummy --mode install ${1+"$@"}; shift - ;; - 
link|lin|li|l) - shift; set dummy --mode link ${1+"$@"}; shift - ;; - uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u) - shift; set dummy --mode uninstall ${1+"$@"}; shift - ;; - esac - # Parse non-mode specific arguments: - while test "$#" -gt 0; do +# Parse options once, thoroughly. This comes as soon as possible in the +# script to make things like `--version' happen as quickly as we can. +{ + # this just eases exit handling + while test $# -gt 0; do opt="$1" shift - case $opt in - --config) func_config ;; - - --debug) preserve_args="$preserve_args $opt" + --debug|-x) opt_debug='set -x' func_echo "enabling shell trace mode" - opt_debug='set -x' $opt_debug ;; - - -dlopen) test "$#" -eq 0 && func_missing_arg "$opt" && break - execute_dlfiles="$execute_dlfiles $1" - shift + --dry-run|--dryrun|-n) + opt_dry_run=: ;; - - --dry-run | -n) opt_dry_run=: ;; - --features) func_features ;; - --finish) mode="finish" ;; - - --mode) test "$#" -eq 0 && func_missing_arg "$opt" && break - case $1 in - # Valid mode arguments: - clean) ;; - compile) ;; - execute) ;; - finish) ;; - install) ;; - link) ;; - relink) ;; - uninstall) ;; - - # Catch anything else as an error - *) func_error "invalid argument for $opt" - exit_cmd=exit - break - ;; - esac - - mode="$1" + --config) + opt_config=: +func_config + ;; + --dlopen|-dlopen) + optarg="$1" + opt_dlopen="${opt_dlopen+$opt_dlopen +}$optarg" shift ;; - --preserve-dup-deps) - opt_duplicate_deps=: ;; - - --quiet|--silent) preserve_args="$preserve_args $opt" - opt_silent=: - opt_verbose=false + opt_preserve_dup_deps=: ;; - - --no-quiet|--no-silent) - preserve_args="$preserve_args $opt" - opt_silent=false + --features) + opt_features=: +func_features ;; - - --verbose| -v) preserve_args="$preserve_args $opt" + --finish) + opt_finish=: +set dummy --mode finish ${1+"$@"}; shift + ;; + --help) + opt_help=: + ;; + --help-all) + opt_help_all=: +opt_help=': help-all' + ;; + --mode) + test $# = 0 && func_missing_arg $opt && break + 
optarg="$1" + opt_mode="$optarg" +case $optarg in + # Valid mode arguments: + clean|compile|execute|finish|install|link|relink|uninstall) ;; + + # Catch anything else as an error + *) func_error "invalid argument for $opt" + exit_cmd=exit + break + ;; +esac + shift + ;; + --no-silent|--no-quiet) opt_silent=false - opt_verbose=: +func_append preserve_args " $opt" ;; - - --no-verbose) preserve_args="$preserve_args $opt" + --no-verbose) opt_verbose=false +func_append preserve_args " $opt" ;; - - --tag) test "$#" -eq 0 && func_missing_arg "$opt" && break - preserve_args="$preserve_args $opt $1" - func_enable_tag "$1" # tagname is set here + --silent|--quiet) + opt_silent=: +func_append preserve_args " $opt" + opt_verbose=false + ;; + --verbose|-v) + opt_verbose=: +func_append preserve_args " $opt" +opt_silent=false + ;; + --tag) + test $# = 0 && func_missing_arg $opt && break + optarg="$1" + opt_tag="$optarg" +func_append preserve_args " $opt $optarg" +func_enable_tag "$optarg" shift ;; + -\?|-h) func_usage ;; + --help) func_help ;; + --version) func_version ;; + # Separate optargs to long options: - -dlopen=*|--mode=*|--tag=*) - func_opt_split "$opt" - set dummy "$func_opt_split_opt" "$func_opt_split_arg" ${1+"$@"} + --*=*) + func_split_long_opt "$opt" + set dummy "$func_split_long_opt_name" "$func_split_long_opt_arg" ${1+"$@"} shift ;; - -\?|-h) func_usage ;; - --help) opt_help=: ;; - --help-all) opt_help=': help-all' ;; - --version) func_version ;; - - -*) func_fatal_help "unrecognized option \`$opt'" ;; - - *) nonopt="$opt" - break + # Separate non-argument short options: + -\?*|-h*|-n*|-v*) + func_split_short_opt "$opt" + set dummy "$func_split_short_opt_name" "-$func_split_short_opt_arg" ${1+"$@"} + shift ;; + + --) break ;; + -*) func_fatal_help "unrecognized option \`$opt'" ;; + *) set dummy "$opt" ${1+"$@"}; shift; break ;; esac done + # Validate options: + + # save first non-option argument + if test "$#" -gt 0; then + nonopt="$opt" + shift + fi + + # 
preserve --debug + test "$opt_debug" = : || func_append preserve_args " --debug" case $host in *cygwin* | *mingw* | *pw32* | *cegcc*) @@ -970,82 +1184,44 @@ opt_duplicate_compiler_generated_deps=: ;; *) - opt_duplicate_compiler_generated_deps=$opt_duplicate_deps + opt_duplicate_compiler_generated_deps=$opt_preserve_dup_deps ;; esac - # Having warned about all mis-specified options, bail out if - # anything was wrong. - $exit_cmd $EXIT_FAILURE -} + $opt_help || { + # Sanity checks first: + func_check_version_match -# func_check_version_match -# Ensure that we are using m4 macros, and libtool script from the same -# release of libtool. -func_check_version_match () -{ - if test "$package_revision" != "$macro_revision"; then - if test "$VERSION" != "$macro_version"; then - if test -z "$macro_version"; then - cat >&2 <<_LT_EOF -$progname: Version mismatch error. This is $PACKAGE $VERSION, but the -$progname: definition of this LT_INIT comes from an older release. -$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION -$progname: and run autoconf again. -_LT_EOF - else - cat >&2 <<_LT_EOF -$progname: Version mismatch error. This is $PACKAGE $VERSION, but the -$progname: definition of this LT_INIT comes from $PACKAGE $macro_version. -$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION -$progname: and run autoconf again. -_LT_EOF - fi - else - cat >&2 <<_LT_EOF -$progname: Version mismatch error. This is $PACKAGE $VERSION, revision $package_revision, -$progname: but the definition of this LT_INIT comes from revision $macro_revision. -$progname: You should recreate aclocal.m4 with macros from revision $package_revision -$progname: of $PACKAGE $VERSION and run autoconf again. 
-_LT_EOF + if test "$build_libtool_libs" != yes && test "$build_old_libs" != yes; then + func_fatal_configuration "not configured to build any kind of library" fi - exit $EXIT_MISMATCH - fi -} - + # Darwin sucks + eval std_shrext=\"$shrext_cmds\" -## ----------- ## -## Main. ## -## ----------- ## - -$opt_help || { - # Sanity checks first: - func_check_version_match - - if test "$build_libtool_libs" != yes && test "$build_old_libs" != yes; then - func_fatal_configuration "not configured to build any kind of library" - fi + # Only execute mode is allowed to have -dlopen flags. + if test -n "$opt_dlopen" && test "$opt_mode" != execute; then + func_error "unrecognized option \`-dlopen'" + $ECHO "$help" 1>&2 + exit $EXIT_FAILURE + fi - test -z "$mode" && func_fatal_error "error: you must specify a MODE." + # Change the help message to a mode-specific one. + generic_help="$help" + help="Try \`$progname --help --mode=$opt_mode' for more information." + } - # Darwin sucks - eval std_shrext=\"$shrext_cmds\" + # Bail if the options were screwed + $exit_cmd $EXIT_FAILURE +} - # Only execute mode is allowed to have -dlopen flags. - if test -n "$execute_dlfiles" && test "$mode" != execute; then - func_error "unrecognized option \`-dlopen'" - $ECHO "$help" 1>&2 - exit $EXIT_FAILURE - fi - # Change the help message to a mode-specific one. - generic_help="$help" - help="Try \`$progname --help --mode=$mode' for more information." -} +## ----------- ## +## Main. ## +## ----------- ## # func_lalib_p file # True iff FILE is a libtool `.la' library or `.lo' object file. @@ -1110,12 +1286,9 @@ # temporary ltwrapper_script. func_ltwrapper_scriptname () { - func_ltwrapper_scriptname_result="" - if func_ltwrapper_executable_p "$1"; then - func_dirname_and_basename "$1" "" "." - func_stripname '' '.exe' "$func_basename_result" - func_ltwrapper_scriptname_result="$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper" - fi + func_dirname_and_basename "$1" "" "." 
+ func_stripname '' '.exe' "$func_basename_result" + func_ltwrapper_scriptname_result="$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper" } # func_ltwrapper_p file @@ -1161,6 +1334,37 @@ } +# func_resolve_sysroot PATH +# Replace a leading = in PATH with a sysroot. Store the result into +# func_resolve_sysroot_result +func_resolve_sysroot () +{ + func_resolve_sysroot_result=$1 + case $func_resolve_sysroot_result in + =*) + func_stripname '=' '' "$func_resolve_sysroot_result" + func_resolve_sysroot_result=$lt_sysroot$func_stripname_result + ;; + esac +} + +# func_replace_sysroot PATH +# If PATH begins with the sysroot, replace it with = and +# store the result into func_replace_sysroot_result. +func_replace_sysroot () +{ + case "$lt_sysroot:$1" in + ?*:"$lt_sysroot"*) + func_stripname "$lt_sysroot" '' "$1" + func_replace_sysroot_result="=$func_stripname_result" + ;; + *) + # Including no sysroot. + func_replace_sysroot_result=$1 + ;; + esac +} + # func_infer_tag arg # Infer tagged configuration to use if any are available and # if one wasn't chosen via the "--tag" command line option. @@ -1173,8 +1377,7 @@ if test -n "$available_tags" && test -z "$tagname"; then CC_quoted= for arg in $CC; do - func_quote_for_eval "$arg" - CC_quoted="$CC_quoted $func_quote_for_eval_result" + func_append_quoted CC_quoted "$arg" done CC_expanded=`func_echo_all $CC` CC_quoted_expanded=`func_echo_all $CC_quoted` @@ -1193,8 +1396,7 @@ CC_quoted= for arg in $CC; do # Double-quote args containing other shell metacharacters. - func_quote_for_eval "$arg" - CC_quoted="$CC_quoted $func_quote_for_eval_result" + func_append_quoted CC_quoted "$arg" done CC_expanded=`func_echo_all $CC` CC_quoted_expanded=`func_echo_all $CC_quoted` @@ -1226,42 +1428,522 @@ -# func_write_libtool_object output_name pic_name nonpic_name -# Create a libtool object file (analogous to a ".la" file), -# but don't create it if we're doing a dry run. 
-func_write_libtool_object () +# func_write_libtool_object output_name pic_name nonpic_name +# Create a libtool object file (analogous to a ".la" file), +# but don't create it if we're doing a dry run. +func_write_libtool_object () +{ + write_libobj=${1} + if test "$build_libtool_libs" = yes; then + write_lobj=\'${2}\' + else + write_lobj=none + fi + + if test "$build_old_libs" = yes; then + write_oldobj=\'${3}\' + else + write_oldobj=none + fi + + $opt_dry_run || { + cat >${write_libobj}T </dev/null` + if test "$?" -eq 0 && test -n "${func_convert_core_file_wine_to_w32_tmp}"; then + func_convert_core_file_wine_to_w32_result=`$ECHO "$func_convert_core_file_wine_to_w32_tmp" | + $SED -e "$lt_sed_naive_backslashify"` + else + func_convert_core_file_wine_to_w32_result= + fi + fi +} +# end: func_convert_core_file_wine_to_w32 + + +# func_convert_core_path_wine_to_w32 ARG +# Helper function used by path conversion functions when $build is *nix, and +# $host is mingw, cygwin, or some other w32 environment. Relies on a correctly +# configured wine environment available, with the winepath program in $build's +# $PATH. Assumes ARG has no leading or trailing path separator characters. +# +# ARG is path to be converted from $build format to win32. +# Result is available in $func_convert_core_path_wine_to_w32_result. +# Unconvertible file (directory) names in ARG are skipped; if no directory names +# are convertible, then the result may be empty. 
+func_convert_core_path_wine_to_w32 () +{ + $opt_debug + # unfortunately, winepath doesn't convert paths, only file names + func_convert_core_path_wine_to_w32_result="" + if test -n "$1"; then + oldIFS=$IFS + IFS=: + for func_convert_core_path_wine_to_w32_f in $1; do + IFS=$oldIFS + func_convert_core_file_wine_to_w32 "$func_convert_core_path_wine_to_w32_f" + if test -n "$func_convert_core_file_wine_to_w32_result" ; then + if test -z "$func_convert_core_path_wine_to_w32_result"; then + func_convert_core_path_wine_to_w32_result="$func_convert_core_file_wine_to_w32_result" + else + func_append func_convert_core_path_wine_to_w32_result ";$func_convert_core_file_wine_to_w32_result" + fi + fi + done + IFS=$oldIFS + fi +} +# end: func_convert_core_path_wine_to_w32 + + +# func_cygpath ARGS... +# Wrapper around calling the cygpath program via LT_CYGPATH. This is used when +# when (1) $build is *nix and Cygwin is hosted via a wine environment; or (2) +# $build is MSYS and $host is Cygwin, or (3) $build is Cygwin. In case (1) or +# (2), returns the Cygwin file name or path in func_cygpath_result (input +# file name or path is assumed to be in w32 format, as previously converted +# from $build's *nix or MSYS format). In case (3), returns the w32 file name +# or path in func_cygpath_result (input file name or path is assumed to be in +# Cygwin format). Returns an empty string on error. +# +# ARGS are passed to cygpath, with the last one being the file name or path to +# be converted. +# +# Specify the absolute *nix (or w32) name to cygpath in the LT_CYGPATH +# environment variable; do not put it in $PATH. +func_cygpath () +{ + $opt_debug + if test -n "$LT_CYGPATH" && test -f "$LT_CYGPATH"; then + func_cygpath_result=`$LT_CYGPATH "$@" 2>/dev/null` + if test "$?" 
-ne 0; then + # on failure, ensure result is empty + func_cygpath_result= + fi + else + func_cygpath_result= + func_error "LT_CYGPATH is empty or specifies non-existent file: \`$LT_CYGPATH'" + fi +} +#end: func_cygpath + + +# func_convert_core_msys_to_w32 ARG +# Convert file name or path ARG from MSYS format to w32 format. Return +# result in func_convert_core_msys_to_w32_result. +func_convert_core_msys_to_w32 () +{ + $opt_debug + # awkward: cmd appends spaces to result + func_convert_core_msys_to_w32_result=`( cmd //c echo "$1" ) 2>/dev/null | + $SED -e 's/[ ]*$//' -e "$lt_sed_naive_backslashify"` +} +#end: func_convert_core_msys_to_w32 + + +# func_convert_file_check ARG1 ARG2 +# Verify that ARG1 (a file name in $build format) was converted to $host +# format in ARG2. Otherwise, emit an error message, but continue (resetting +# func_to_host_file_result to ARG1). +func_convert_file_check () +{ + $opt_debug + if test -z "$2" && test -n "$1" ; then + func_error "Could not determine host file name corresponding to" + func_error " \`$1'" + func_error "Continuing, but uninstalled executables may not work." + # Fallback: + func_to_host_file_result="$1" + fi +} +# end func_convert_file_check + + +# func_convert_path_check FROM_PATHSEP TO_PATHSEP FROM_PATH TO_PATH +# Verify that FROM_PATH (a path in $build format) was converted to $host +# format in TO_PATH. Otherwise, emit an error message, but continue, resetting +# func_to_host_file_result to a simplistic fallback value (see below). +func_convert_path_check () +{ + $opt_debug + if test -z "$4" && test -n "$3"; then + func_error "Could not determine the host path corresponding to" + func_error " \`$3'" + func_error "Continuing, but uninstalled executables may not work." + # Fallback. This is a deliberately simplistic "conversion" and + # should not be "improved". See libtool.info. 
+ if test "x$1" != "x$2"; then + lt_replace_pathsep_chars="s|$1|$2|g" + func_to_host_path_result=`echo "$3" | + $SED -e "$lt_replace_pathsep_chars"` + else + func_to_host_path_result="$3" + fi + fi +} +# end func_convert_path_check + + +# func_convert_path_front_back_pathsep FRONTPAT BACKPAT REPL ORIG +# Modifies func_to_host_path_result by prepending REPL if ORIG matches FRONTPAT +# and appending REPL if ORIG matches BACKPAT. +func_convert_path_front_back_pathsep () +{ + $opt_debug + case $4 in + $1 ) func_to_host_path_result="$3$func_to_host_path_result" + ;; + esac + case $4 in + $2 ) func_append func_to_host_path_result "$3" + ;; + esac +} +# end func_convert_path_front_back_pathsep + + +################################################## +# $build to $host FILE NAME CONVERSION FUNCTIONS # +################################################## +# invoked via `$to_host_file_cmd ARG' +# +# In each case, ARG is the path to be converted from $build to $host format. +# Result will be available in $func_to_host_file_result. + + +# func_to_host_file ARG +# Converts the file name ARG from $build format to $host format. Return result +# in func_to_host_file_result. +func_to_host_file () +{ + $opt_debug + $to_host_file_cmd "$1" +} +# end func_to_host_file + + +# func_to_tool_file ARG LAZY +# converts the file name ARG from $build format to toolchain format. Return +# result in func_to_tool_file_result. If the conversion in use is listed +# in (the comma separated) LAZY, no conversion takes place. +func_to_tool_file () +{ + $opt_debug + case ,$2, in + *,"$to_tool_file_cmd",*) + func_to_tool_file_result=$1 + ;; + *) + $to_tool_file_cmd "$1" + func_to_tool_file_result=$func_to_host_file_result + ;; + esac +} +# end func_to_tool_file + + +# func_convert_file_noop ARG +# Copy ARG to func_to_host_file_result. 
+func_convert_file_noop () +{ + func_to_host_file_result="$1" +} +# end func_convert_file_noop + + +# func_convert_file_msys_to_w32 ARG +# Convert file name ARG from (mingw) MSYS to (mingw) w32 format; automatic +# conversion to w32 is not available inside the cwrapper. Returns result in +# func_to_host_file_result. +func_convert_file_msys_to_w32 () +{ + $opt_debug + func_to_host_file_result="$1" + if test -n "$1"; then + func_convert_core_msys_to_w32 "$1" + func_to_host_file_result="$func_convert_core_msys_to_w32_result" + fi + func_convert_file_check "$1" "$func_to_host_file_result" +} +# end func_convert_file_msys_to_w32 + + +# func_convert_file_cygwin_to_w32 ARG +# Convert file name ARG from Cygwin to w32 format. Returns result in +# func_to_host_file_result. +func_convert_file_cygwin_to_w32 () +{ + $opt_debug + func_to_host_file_result="$1" + if test -n "$1"; then + # because $build is cygwin, we call "the" cygpath in $PATH; no need to use + # LT_CYGPATH in this case. + func_to_host_file_result=`cygpath -m "$1"` + fi + func_convert_file_check "$1" "$func_to_host_file_result" +} +# end func_convert_file_cygwin_to_w32 + + +# func_convert_file_nix_to_w32 ARG +# Convert file name ARG from *nix to w32 format. Requires a wine environment +# and a working winepath. Returns result in func_to_host_file_result. +func_convert_file_nix_to_w32 () +{ + $opt_debug + func_to_host_file_result="$1" + if test -n "$1"; then + func_convert_core_file_wine_to_w32 "$1" + func_to_host_file_result="$func_convert_core_file_wine_to_w32_result" + fi + func_convert_file_check "$1" "$func_to_host_file_result" +} +# end func_convert_file_nix_to_w32 + + +# func_convert_file_msys_to_cygwin ARG +# Convert file name ARG from MSYS to Cygwin format. Requires LT_CYGPATH set. +# Returns result in func_to_host_file_result. 
+func_convert_file_msys_to_cygwin () +{ + $opt_debug + func_to_host_file_result="$1" + if test -n "$1"; then + func_convert_core_msys_to_w32 "$1" + func_cygpath -u "$func_convert_core_msys_to_w32_result" + func_to_host_file_result="$func_cygpath_result" + fi + func_convert_file_check "$1" "$func_to_host_file_result" +} +# end func_convert_file_msys_to_cygwin + + +# func_convert_file_nix_to_cygwin ARG +# Convert file name ARG from *nix to Cygwin format. Requires Cygwin installed +# in a wine environment, working winepath, and LT_CYGPATH set. Returns result +# in func_to_host_file_result. +func_convert_file_nix_to_cygwin () +{ + $opt_debug + func_to_host_file_result="$1" + if test -n "$1"; then + # convert from *nix to w32, then use cygpath to convert from w32 to cygwin. + func_convert_core_file_wine_to_w32 "$1" + func_cygpath -u "$func_convert_core_file_wine_to_w32_result" + func_to_host_file_result="$func_cygpath_result" + fi + func_convert_file_check "$1" "$func_to_host_file_result" +} +# end func_convert_file_nix_to_cygwin + + +############################################# +# $build to $host PATH CONVERSION FUNCTIONS # +############################################# +# invoked via `$to_host_path_cmd ARG' +# +# In each case, ARG is the path to be converted from $build to $host format. +# The result will be available in $func_to_host_path_result. +# +# Path separators are also converted from $build format to $host format. If +# ARG begins or ends with a path separator character, it is preserved (but +# converted to $host format) on output. +# +# All path conversion functions are named using the following convention: +# file name conversion function : func_convert_file_X_to_Y () +# path conversion function : func_convert_path_X_to_Y () +# where, for any given $build/$host combination the 'X_to_Y' value is the +# same. 
If conversion functions are added for new $build/$host combinations, +# the two new functions must follow this pattern, or func_init_to_host_path_cmd +# will break. + + +# func_init_to_host_path_cmd +# Ensures that function "pointer" variable $to_host_path_cmd is set to the +# appropriate value, based on the value of $to_host_file_cmd. +to_host_path_cmd= +func_init_to_host_path_cmd () +{ + $opt_debug + if test -z "$to_host_path_cmd"; then + func_stripname 'func_convert_file_' '' "$to_host_file_cmd" + to_host_path_cmd="func_convert_path_${func_stripname_result}" + fi +} + + +# func_to_host_path ARG +# Converts the path ARG from $build format to $host format. Return result +# in func_to_host_path_result. +func_to_host_path () +{ + $opt_debug + func_init_to_host_path_cmd + $to_host_path_cmd "$1" +} +# end func_to_host_path + + +# func_convert_path_noop ARG +# Copy ARG to func_to_host_path_result. +func_convert_path_noop () { - write_libobj=${1} - if test "$build_libtool_libs" = yes; then - write_lobj=\'${2}\' - else - write_lobj=none - fi + func_to_host_path_result="$1" +} +# end func_convert_path_noop - if test "$build_old_libs" = yes; then - write_oldobj=\'${3}\' - else - write_oldobj=none - fi - $opt_dry_run || { - cat >${write_libobj}T < "$lockfile" fi $opt_dry_run || $RM $removelist - removelist="$removelist $lockfile" + func_append removelist " $lockfile" trap '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' 1 2 15 - if test -n "$fix_srcfile_path"; then - eval srcfile=\"$fix_srcfile_path\" - fi + func_to_tool_file "$srcfile" func_convert_file_msys_to_w32 + srcfile=$func_to_tool_file_result func_quote_for_eval "$srcfile" qsrcfile=$func_quote_for_eval_result @@ -1515,7 +2194,7 @@ if test -z "$output_obj"; then # Place PIC objects in $objdir - command="$command -o $lobj" + func_append command " -o $lobj" fi func_show_eval_locale "$command" \ @@ -1562,11 +2241,11 @@ command="$base_compile $qsrcfile $pic_flag" fi if test "$compiler_c_o" = yes; then - 
command="$command -o $obj" + func_append command " -o $obj" fi # Suppress compiler output if we already did a PIC compilation. - command="$command$suppress_output" + func_append command "$suppress_output" func_show_eval_locale "$command" \ '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' @@ -1611,13 +2290,13 @@ } $opt_help || { - test "$mode" = compile && func_mode_compile ${1+"$@"} + test "$opt_mode" = compile && func_mode_compile ${1+"$@"} } func_mode_help () { # We need to display help for each of the modes. - case $mode in + case $opt_mode in "") # Generic help is extracted from the usage comments # at the start of this file. @@ -1793,7 +2472,7 @@ ;; *) - func_fatal_help "invalid operation mode \`$mode'" + func_fatal_help "invalid operation mode \`$opt_mode'" ;; esac @@ -1808,13 +2487,13 @@ else { func_help noexit - for mode in compile link execute install finish uninstall clean; do + for opt_mode in compile link execute install finish uninstall clean; do func_mode_help done } | sed -n '1p; 2,$s/^Usage:/ or: /p' { func_help noexit - for mode in compile link execute install finish uninstall clean; do + for opt_mode in compile link execute install finish uninstall clean; do echo func_mode_help done @@ -1843,13 +2522,16 @@ func_fatal_help "you must specify a COMMAND" # Handle -dlopen flags immediately. - for file in $execute_dlfiles; do + for file in $opt_dlopen; do test -f "$file" \ || func_fatal_help "\`$file' is not a file" dir= case $file in *.la) + func_resolve_sysroot "$file" + file=$func_resolve_sysroot_result + # Check to see that this really is a libtool archive. func_lalib_unsafe_p "$file" \ || func_fatal_help "\`$lib' is not a valid libtool archive" @@ -1871,7 +2553,7 @@ dir="$func_dirname_result" if test -f "$dir/$objdir/$dlname"; then - dir="$dir/$objdir" + func_append dir "/$objdir" else if test ! 
-f "$dir/$dlname"; then func_fatal_error "cannot find \`$dlname' in \`$dir' or \`$dir/$objdir'" @@ -1928,8 +2610,7 @@ ;; esac # Quote arguments (to preserve shell metacharacters). - func_quote_for_eval "$file" - args="$args $func_quote_for_eval_result" + func_append_quoted args "$file" done if test "X$opt_dry_run" = Xfalse; then @@ -1961,22 +2642,59 @@ fi } -test "$mode" = execute && func_mode_execute ${1+"$@"} +test "$opt_mode" = execute && func_mode_execute ${1+"$@"} # func_mode_finish arg... func_mode_finish () { $opt_debug - libdirs="$nonopt" + libs= + libdirs= admincmds= - if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then - for dir - do - libdirs="$libdirs $dir" - done + for opt in "$nonopt" ${1+"$@"} + do + if test -d "$opt"; then + func_append libdirs " $opt" + + elif test -f "$opt"; then + if func_lalib_unsafe_p "$opt"; then + func_append libs " $opt" + else + func_warning "\`$opt' is not a valid libtool archive" + fi + + else + func_fatal_error "invalid argument \`$opt'" + fi + done + + if test -n "$libs"; then + if test -n "$lt_sysroot"; then + sysroot_regex=`$ECHO "$lt_sysroot" | $SED "$sed_make_literal_regex"` + sysroot_cmd="s/\([ ']\)$sysroot_regex/\1/g;" + else + sysroot_cmd= + fi + # Remove sysroot references + if $opt_dry_run; then + for lib in $libs; do + echo "removing references to $lt_sysroot and \`=' prefixes from $lib" + done + else + tmpdir=`func_mktempdir` + for lib in $libs; do + sed -e "${sysroot_cmd} s/\([ ']-[LR]\)=/\1/g; s/\([ ']\)=/\1/g" $lib \ + > $tmpdir/tmp-la + mv -f $tmpdir/tmp-la $lib + done + ${RM}r "$tmpdir" + fi + fi + + if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then for libdir in $libdirs; do if test -n "$finish_cmds"; then # Do each command in the finish commands. @@ -1986,7 +2704,7 @@ if test -n "$finish_eval"; then # Do the single finish_eval. 
eval cmds=\"$finish_eval\" - $opt_dry_run || eval "$cmds" || admincmds="$admincmds + $opt_dry_run || eval "$cmds" || func_append admincmds " $cmds" fi done @@ -1995,53 +2713,55 @@ # Exit here if they wanted silent mode. $opt_silent && exit $EXIT_SUCCESS - echo "----------------------------------------------------------------------" - echo "Libraries have been installed in:" - for libdir in $libdirs; do - $ECHO " $libdir" - done - echo - echo "If you ever happen to want to link against installed libraries" - echo "in a given directory, LIBDIR, you must either use libtool, and" - echo "specify the full pathname of the library, or use the \`-LLIBDIR'" - echo "flag during linking and do at least one of the following:" - if test -n "$shlibpath_var"; then - echo " - add LIBDIR to the \`$shlibpath_var' environment variable" - echo " during execution" - fi - if test -n "$runpath_var"; then - echo " - add LIBDIR to the \`$runpath_var' environment variable" - echo " during linking" - fi - if test -n "$hardcode_libdir_flag_spec"; then - libdir=LIBDIR - eval flag=\"$hardcode_libdir_flag_spec\" + if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then + echo "----------------------------------------------------------------------" + echo "Libraries have been installed in:" + for libdir in $libdirs; do + $ECHO " $libdir" + done + echo + echo "If you ever happen to want to link against installed libraries" + echo "in a given directory, LIBDIR, you must either use libtool, and" + echo "specify the full pathname of the library, or use the \`-LLIBDIR'" + echo "flag during linking and do at least one of the following:" + if test -n "$shlibpath_var"; then + echo " - add LIBDIR to the \`$shlibpath_var' environment variable" + echo " during execution" + fi + if test -n "$runpath_var"; then + echo " - add LIBDIR to the \`$runpath_var' environment variable" + echo " during linking" + fi + if test -n "$hardcode_libdir_flag_spec"; then + libdir=LIBDIR + eval 
flag=\"$hardcode_libdir_flag_spec\" - $ECHO " - use the \`$flag' linker flag" - fi - if test -n "$admincmds"; then - $ECHO " - have your system administrator run these commands:$admincmds" - fi - if test -f /etc/ld.so.conf; then - echo " - have your system administrator add LIBDIR to \`/etc/ld.so.conf'" - fi - echo + $ECHO " - use the \`$flag' linker flag" + fi + if test -n "$admincmds"; then + $ECHO " - have your system administrator run these commands:$admincmds" + fi + if test -f /etc/ld.so.conf; then + echo " - have your system administrator add LIBDIR to \`/etc/ld.so.conf'" + fi + echo - echo "See any operating system documentation about shared libraries for" - case $host in - solaris2.[6789]|solaris2.1[0-9]) - echo "more information, such as the ld(1), crle(1) and ld.so(8) manual" - echo "pages." - ;; - *) - echo "more information, such as the ld(1) and ld.so(8) manual pages." - ;; - esac - echo "----------------------------------------------------------------------" + echo "See any operating system documentation about shared libraries for" + case $host in + solaris2.[6789]|solaris2.1[0-9]) + echo "more information, such as the ld(1), crle(1) and ld.so(8) manual" + echo "pages." + ;; + *) + echo "more information, such as the ld(1) and ld.so(8) manual pages." + ;; + esac + echo "----------------------------------------------------------------------" + fi exit $EXIT_SUCCESS } -test "$mode" = finish && func_mode_finish ${1+"$@"} +test "$opt_mode" = finish && func_mode_finish ${1+"$@"} # func_mode_install arg... @@ -2066,7 +2786,7 @@ # The real first argument should be the name of the installation program. # Aesthetically quote it. 
func_quote_for_eval "$arg" - install_prog="$install_prog$func_quote_for_eval_result" + func_append install_prog "$func_quote_for_eval_result" install_shared_prog=$install_prog case " $install_prog " in *[\\\ /]cp\ *) install_cp=: ;; @@ -2086,7 +2806,7 @@ do arg2= if test -n "$dest"; then - files="$files $dest" + func_append files " $dest" dest=$arg continue fi @@ -2124,11 +2844,11 @@ # Aesthetically quote the argument. func_quote_for_eval "$arg" - install_prog="$install_prog $func_quote_for_eval_result" + func_append install_prog " $func_quote_for_eval_result" if test -n "$arg2"; then func_quote_for_eval "$arg2" fi - install_shared_prog="$install_shared_prog $func_quote_for_eval_result" + func_append install_shared_prog " $func_quote_for_eval_result" done test -z "$install_prog" && \ @@ -2140,7 +2860,7 @@ if test -n "$install_override_mode" && $no_mode; then if $install_cp; then :; else func_quote_for_eval "$install_override_mode" - install_shared_prog="$install_shared_prog -m $func_quote_for_eval_result" + func_append install_shared_prog " -m $func_quote_for_eval_result" fi fi @@ -2198,10 +2918,13 @@ case $file in *.$libext) # Do the static libraries later. - staticlibs="$staticlibs $file" + func_append staticlibs " $file" ;; *.la) + func_resolve_sysroot "$file" + file=$func_resolve_sysroot_result + # Check to see that this really is a libtool archive. func_lalib_unsafe_p "$file" \ || func_fatal_help "\`$file' is not a valid libtool archive" @@ -2215,19 +2938,19 @@ if test "X$destdir" = "X$libdir"; then case "$current_libdirs " in *" $libdir "*) ;; - *) current_libdirs="$current_libdirs $libdir" ;; + *) func_append current_libdirs " $libdir" ;; esac else # Note the libdir as a future libdir. 
case "$future_libdirs " in *" $libdir "*) ;; - *) future_libdirs="$future_libdirs $libdir" ;; + *) func_append future_libdirs " $libdir" ;; esac fi func_dirname "$file" "/" "" dir="$func_dirname_result" - dir="$dir$objdir" + func_append dir "$objdir" if test -n "$relink_command"; then # Determine the prefix the user has applied to our future dir. @@ -2304,7 +3027,7 @@ func_show_eval "$install_prog $instname $destdir/$name" 'exit $?' # Maybe install the static library, too. - test -n "$old_library" && staticlibs="$staticlibs $dir/$old_library" + test -n "$old_library" && func_append staticlibs " $dir/$old_library" ;; *.lo) @@ -2501,7 +3224,7 @@ fi } -test "$mode" = install && func_mode_install ${1+"$@"} +test "$opt_mode" = install && func_mode_install ${1+"$@"} # func_generate_dlsyms outputname originator pic_p @@ -2548,6 +3271,18 @@ #pragma GCC diagnostic ignored \"-Wstrict-prototypes\" #endif +/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */ +#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) +/* DATA imports from DLLs on WIN32 con't be const, because runtime + relocations are performed -- see ld's documentation on pseudo-relocs. */ +# define LT_DLSYM_CONST +#elif defined(__osf__) +/* This system does not cope well with relocations in const data. */ +# define LT_DLSYM_CONST +#else +# define LT_DLSYM_CONST const +#endif + /* External symbol declarations for the compiler. */\ " @@ -2559,8 +3294,9 @@ # Add our own program objects to the symbol list. 
progfiles=`$ECHO "$objs$old_deplibs" | $SP2NL | $SED "$lo2o" | $NL2SP` for progfile in $progfiles; do - func_verbose "extracting global C symbols from \`$progfile'" - $opt_dry_run || eval "$NM $progfile | $global_symbol_pipe >> '$nlist'" + func_to_tool_file "$progfile" func_convert_file_msys_to_w32 + func_verbose "extracting global C symbols from \`$func_to_tool_file_result'" + $opt_dry_run || eval "$NM $func_to_tool_file_result | $global_symbol_pipe >> '$nlist'" done if test -n "$exclude_expsyms"; then @@ -2609,10 +3345,52 @@ func_verbose "extracting global C symbols from \`$dlprefile'" func_basename "$dlprefile" name="$func_basename_result" - $opt_dry_run || { - eval '$ECHO ": $name " >> "$nlist"' - eval "$NM $dlprefile 2>/dev/null | $global_symbol_pipe >> '$nlist'" - } + case $host in + *cygwin* | *mingw* | *cegcc* ) + # if an import library, we need to obtain dlname + if func_win32_import_lib_p "$dlprefile"; then + func_tr_sh "$dlprefile" + eval "curr_lafile=\$libfile_$func_tr_sh_result" + dlprefile_dlbasename="" + if test -n "$curr_lafile" && func_lalib_p "$curr_lafile"; then + # Use subshell, to avoid clobbering current variable values + dlprefile_dlname=`source "$curr_lafile" && echo "$dlname"` + if test -n "$dlprefile_dlname" ; then + func_basename "$dlprefile_dlname" + dlprefile_dlbasename="$func_basename_result" + else + # no lafile. user explicitly requested -dlpreopen . 
+ $sharedlib_from_linklib_cmd "$dlprefile" + dlprefile_dlbasename=$sharedlib_from_linklib_result + fi + fi + $opt_dry_run || { + if test -n "$dlprefile_dlbasename" ; then + eval '$ECHO ": $dlprefile_dlbasename" >> "$nlist"' + else + func_warning "Could not compute DLL name from $name" + eval '$ECHO ": $name " >> "$nlist"' + fi + func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32 + eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe | + $SED -e '/I __imp/d' -e 's/I __nm_/D /;s/_nm__//' >> '$nlist'" + } + else # not an import lib + $opt_dry_run || { + eval '$ECHO ": $name " >> "$nlist"' + func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32 + eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'" + } + fi + ;; + *) + $opt_dry_run || { + eval '$ECHO ": $name " >> "$nlist"' + func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32 + eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'" + } + ;; + esac done $opt_dry_run || { @@ -2650,26 +3428,9 @@ const char *name; void *address; } lt_dlsymlist; -" - case $host in - *cygwin* | *mingw* | *cegcc* ) - echo >> "$output_objdir/$my_dlsyms" "\ -/* DATA imports from DLLs on WIN32 con't be const, because - runtime relocations are performed -- see ld's documentation - on pseudo-relocs. 
*/" - lt_dlsym_const= ;; - *osf5*) - echo >> "$output_objdir/$my_dlsyms" "\ -/* This system does not cope well with relocations in const data */" - lt_dlsym_const= ;; - *) - lt_dlsym_const=const ;; - esac - - echo >> "$output_objdir/$my_dlsyms" "\ -extern $lt_dlsym_const lt_dlsymlist +extern LT_DLSYM_CONST lt_dlsymlist lt_${my_prefix}_LTX_preloaded_symbols[]; -$lt_dlsym_const lt_dlsymlist +LT_DLSYM_CONST lt_dlsymlist lt_${my_prefix}_LTX_preloaded_symbols[] = {\ { \"$my_originator\", (void *) 0 }," @@ -2725,7 +3486,7 @@ for arg in $LTCFLAGS; do case $arg in -pie | -fpie | -fPIE) ;; - *) symtab_cflags="$symtab_cflags $arg" ;; + *) func_append symtab_cflags " $arg" ;; esac done @@ -2788,7 +3549,8 @@ # Keep the egrep pattern in sync with the one in _LT_CHECK_MAGIC_METHOD. if eval $OBJDUMP -f $1 | $SED -e '10q' 2>/dev/null | $EGREP 'file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' >/dev/null; then - win32_nmres=`eval $NM -f posix -A $1 | + func_to_tool_file "$1" func_convert_file_msys_to_w32 + win32_nmres=`eval $NM -f posix -A \"$func_to_tool_file_result\" | $SED -n -e ' 1,100{ / I /{ @@ -2817,6 +3579,131 @@ $ECHO "$win32_libid_type" } +# func_cygming_dll_for_implib ARG +# +# Platform-specific function to extract the +# name of the DLL associated with the specified +# import library ARG. +# Invoked by eval'ing the libtool variable +# $sharedlib_from_linklib_cmd +# Result is available in the variable +# $sharedlib_from_linklib_result +func_cygming_dll_for_implib () +{ + $opt_debug + sharedlib_from_linklib_result=`$DLLTOOL --identify-strict --identify "$1"` +} + +# func_cygming_dll_for_implib_fallback_core SECTION_NAME LIBNAMEs +# +# The is the core of a fallback implementation of a +# platform-specific function to extract the name of the +# DLL associated with the specified import library LIBNAME. +# +# SECTION_NAME is either .idata$6 or .idata$7, depending +# on the platform and compiler that created the implib. 
+# +# Echos the name of the DLL associated with the +# specified import library. +func_cygming_dll_for_implib_fallback_core () +{ + $opt_debug + match_literal=`$ECHO "$1" | $SED "$sed_make_literal_regex"` + $OBJDUMP -s --section "$1" "$2" 2>/dev/null | + $SED '/^Contents of section '"$match_literal"':/{ + # Place marker at beginning of archive member dllname section + s/.*/====MARK====/ + p + d + } + # These lines can sometimes be longer than 43 characters, but + # are always uninteresting + /:[ ]*file format pe[i]\{,1\}-/d + /^In archive [^:]*:/d + # Ensure marker is printed + /^====MARK====/p + # Remove all lines with less than 43 characters + /^.\{43\}/!d + # From remaining lines, remove first 43 characters + s/^.\{43\}//' | + $SED -n ' + # Join marker and all lines until next marker into a single line + /^====MARK====/ b para + H + $ b para + b + :para + x + s/\n//g + # Remove the marker + s/^====MARK====// + # Remove trailing dots and whitespace + s/[\. \t]*$// + # Print + /./p' | + # we now have a list, one entry per line, of the stringified + # contents of the appropriate section of all members of the + # archive which possess that section. Heuristic: eliminate + # all those which have a first or second character that is + # a '.' (that is, objdump's representation of an unprintable + # character.) This should work for all archives with less than + # 0x302f exports -- but will fail for DLLs whose name actually + # begins with a literal '.' or a single character followed by + # a '.'. + # + # Of those that remain, print the first one. + $SED -e '/^\./d;/^.\./d;q' +} + +# func_cygming_gnu_implib_p ARG +# This predicate returns with zero status (TRUE) if +# ARG is a GNU/binutils-style import library. Returns +# with nonzero status (FALSE) otherwise. 
+func_cygming_gnu_implib_p () +{ + $opt_debug + func_to_tool_file "$1" func_convert_file_msys_to_w32 + func_cygming_gnu_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $EGREP ' (_head_[A-Za-z0-9_]+_[ad]l*|[A-Za-z0-9_]+_[ad]l*_iname)$'` + test -n "$func_cygming_gnu_implib_tmp" +} + +# func_cygming_ms_implib_p ARG +# This predicate returns with zero status (TRUE) if +# ARG is an MS-style import library. Returns +# with nonzero status (FALSE) otherwise. +func_cygming_ms_implib_p () +{ + $opt_debug + func_to_tool_file "$1" func_convert_file_msys_to_w32 + func_cygming_ms_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $GREP '_NULL_IMPORT_DESCRIPTOR'` + test -n "$func_cygming_ms_implib_tmp" +} + +# func_cygming_dll_for_implib_fallback ARG +# Platform-specific function to extract the +# name of the DLL associated with the specified +# import library ARG. +# +# This fallback implementation is for use when $DLLTOOL +# does not support the --identify-strict option. +# Invoked by eval'ing the libtool variable +# $sharedlib_from_linklib_cmd +# Result is available in the variable +# $sharedlib_from_linklib_result +func_cygming_dll_for_implib_fallback () +{ + $opt_debug + if func_cygming_gnu_implib_p "$1" ; then + # binutils import library + sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$7' "$1"` + elif func_cygming_ms_implib_p "$1" ; then + # ms-generated import library + sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$6' "$1"` + else + # unknown + sharedlib_from_linklib_result="" + fi +} # func_extract_an_archive dir oldlib @@ -3195,6 +4082,18 @@ if test -f \"\$progdir/\$program\"; then" + # fixup the dll searchpath if we need to. + # + # Fix the DLL searchpath if we need to. Do this before prepending + # to shlibpath, because on Windows, both are PATH and uninstalled + # libraries must come first. 
+ if test -n "$dllsearchpath"; then + $ECHO "\ + # Add the dll search path components to the executable PATH + PATH=$dllsearchpath:\$PATH +" + fi + # Export our shlibpath_var if we have one. if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then $ECHO "\ @@ -3209,14 +4108,6 @@ " fi - # fixup the dll searchpath if we need to. - if test -n "$dllsearchpath"; then - $ECHO "\ - # Add the dll search path components to the executable PATH - PATH=$dllsearchpath:\$PATH -" - fi - $ECHO "\ if test \"\$libtool_execute_magic\" != \"$magic\"; then # Run the actual program with our arguments. @@ -3234,166 +4125,6 @@ } -# func_to_host_path arg -# -# Convert paths to host format when used with build tools. -# Intended for use with "native" mingw (where libtool itself -# is running under the msys shell), or in the following cross- -# build environments: -# $build $host -# mingw (msys) mingw [e.g. native] -# cygwin mingw -# *nix + wine mingw -# where wine is equipped with the `winepath' executable. -# In the native mingw case, the (msys) shell automatically -# converts paths for any non-msys applications it launches, -# but that facility isn't available from inside the cwrapper. -# Similar accommodations are necessary for $host mingw and -# $build cygwin. Calling this function does no harm for other -# $host/$build combinations not listed above. -# -# ARG is the path (on $build) that should be converted to -# the proper representation for $host. The result is stored -# in $func_to_host_path_result. 
-func_to_host_path () -{ - func_to_host_path_result="$1" - if test -n "$1"; then - case $host in - *mingw* ) - lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g' - case $build in - *mingw* ) # actually, msys - # awkward: cmd appends spaces to result - func_to_host_path_result=`( cmd //c echo "$1" ) 2>/dev/null | - $SED -e 's/[ ]*$//' -e "$lt_sed_naive_backslashify"` - ;; - *cygwin* ) - func_to_host_path_result=`cygpath -w "$1" | - $SED -e "$lt_sed_naive_backslashify"` - ;; - * ) - # Unfortunately, winepath does not exit with a non-zero - # error code, so we are forced to check the contents of - # stdout. On the other hand, if the command is not - # found, the shell will set an exit code of 127 and print - # *an error message* to stdout. So we must check for both - # error code of zero AND non-empty stdout, which explains - # the odd construction: - func_to_host_path_tmp1=`winepath -w "$1" 2>/dev/null` - if test "$?" -eq 0 && test -n "${func_to_host_path_tmp1}"; then - func_to_host_path_result=`$ECHO "$func_to_host_path_tmp1" | - $SED -e "$lt_sed_naive_backslashify"` - else - # Allow warning below. - func_to_host_path_result= - fi - ;; - esac - if test -z "$func_to_host_path_result" ; then - func_error "Could not determine host path corresponding to" - func_error " \`$1'" - func_error "Continuing, but uninstalled executables may not work." - # Fallback: - func_to_host_path_result="$1" - fi - ;; - esac - fi -} -# end: func_to_host_path - -# func_to_host_pathlist arg -# -# Convert pathlists to host format when used with build tools. -# See func_to_host_path(), above. This function supports the -# following $build/$host combinations (but does no harm for -# combinations not listed here): -# $build $host -# mingw (msys) mingw [e.g. native] -# cygwin mingw -# *nix + wine mingw -# -# Path separators are also converted from $build format to -# $host format. 
If ARG begins or ends with a path separator -# character, it is preserved (but converted to $host format) -# on output. -# -# ARG is a pathlist (on $build) that should be converted to -# the proper representation on $host. The result is stored -# in $func_to_host_pathlist_result. -func_to_host_pathlist () -{ - func_to_host_pathlist_result="$1" - if test -n "$1"; then - case $host in - *mingw* ) - lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g' - # Remove leading and trailing path separator characters from - # ARG. msys behavior is inconsistent here, cygpath turns them - # into '.;' and ';.', and winepath ignores them completely. - func_stripname : : "$1" - func_to_host_pathlist_tmp1=$func_stripname_result - case $build in - *mingw* ) # Actually, msys. - # Awkward: cmd appends spaces to result. - func_to_host_pathlist_result=` - ( cmd //c echo "$func_to_host_pathlist_tmp1" ) 2>/dev/null | - $SED -e 's/[ ]*$//' -e "$lt_sed_naive_backslashify"` - ;; - *cygwin* ) - func_to_host_pathlist_result=`cygpath -w -p "$func_to_host_pathlist_tmp1" | - $SED -e "$lt_sed_naive_backslashify"` - ;; - * ) - # unfortunately, winepath doesn't convert pathlists - func_to_host_pathlist_result="" - func_to_host_pathlist_oldIFS=$IFS - IFS=: - for func_to_host_pathlist_f in $func_to_host_pathlist_tmp1 ; do - IFS=$func_to_host_pathlist_oldIFS - if test -n "$func_to_host_pathlist_f" ; then - func_to_host_path "$func_to_host_pathlist_f" - if test -n "$func_to_host_path_result" ; then - if test -z "$func_to_host_pathlist_result" ; then - func_to_host_pathlist_result="$func_to_host_path_result" - else - func_append func_to_host_pathlist_result ";$func_to_host_path_result" - fi - fi - fi - done - IFS=$func_to_host_pathlist_oldIFS - ;; - esac - if test -z "$func_to_host_pathlist_result"; then - func_error "Could not determine the host path(s) corresponding to" - func_error " \`$1'" - func_error "Continuing, but uninstalled executables may not work." - # Fallback. 
This may break if $1 contains DOS-style drive - # specifications. The fix is not to complicate the expression - # below, but for the user to provide a working wine installation - # with winepath so that path translation in the cross-to-mingw - # case works properly. - lt_replace_pathsep_nix_to_dos="s|:|;|g" - func_to_host_pathlist_result=`echo "$func_to_host_pathlist_tmp1" |\ - $SED -e "$lt_replace_pathsep_nix_to_dos"` - fi - # Now, add the leading and trailing path separators back - case "$1" in - :* ) func_to_host_pathlist_result=";$func_to_host_pathlist_result" - ;; - esac - case "$1" in - *: ) func_append func_to_host_pathlist_result ";" - ;; - esac - ;; - esac - fi -} -# end: func_to_host_pathlist - # func_emit_cwrapperexe_src # emit the source code for a wrapper executable on stdout # Must ONLY be called from within func_mode_link because @@ -3563,14 +4294,14 @@ EOF cat </dev/null` + if test "$want_nocaseglob" = yes; then + shopt -s nocaseglob + potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null` + $nocaseglob + else + potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null` + fi for potent_lib in $potential_libs; do # Follow soft links. if ls -lLd "$potent_lib" 2>/dev/null | @@ -6999,7 +7812,7 @@ if eval $file_magic_cmd \"\$potlib\" 2>/dev/null | $SED -e 10q | $EGREP "$file_magic_regex" > /dev/null; then - newdeplibs="$newdeplibs $a_deplib" + func_append newdeplibs " $a_deplib" a_deplib="" break 2 fi @@ -7024,7 +7837,7 @@ ;; *) # Add a -L argument. - newdeplibs="$newdeplibs $a_deplib" + func_append newdeplibs " $a_deplib" ;; esac done # Gone through all deplibs. 
@@ -7040,7 +7853,7 @@ if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then case " $predeps $postdeps " in *" $a_deplib "*) - newdeplibs="$newdeplibs $a_deplib" + func_append newdeplibs " $a_deplib" a_deplib="" ;; esac @@ -7053,7 +7866,7 @@ potlib="$potent_lib" # see symlink-check above in file_magic test if eval "\$ECHO \"$potent_lib\"" 2>/dev/null | $SED 10q | \ $EGREP "$match_pattern_regex" > /dev/null; then - newdeplibs="$newdeplibs $a_deplib" + func_append newdeplibs " $a_deplib" a_deplib="" break 2 fi @@ -7078,7 +7891,7 @@ ;; *) # Add a -L argument. - newdeplibs="$newdeplibs $a_deplib" + func_append newdeplibs " $a_deplib" ;; esac done # Gone through all deplibs. @@ -7182,7 +7995,7 @@ *) case " $deplibs " in *" -L$path/$objdir "*) - new_libs="$new_libs -L$path/$objdir" ;; + func_append new_libs " -L$path/$objdir" ;; esac ;; esac @@ -7192,10 +8005,10 @@ -L*) case " $new_libs " in *" $deplib "*) ;; - *) new_libs="$new_libs $deplib" ;; + *) func_append new_libs " $deplib" ;; esac ;; - *) new_libs="$new_libs $deplib" ;; + *) func_append new_libs " $deplib" ;; esac done deplibs="$new_libs" @@ -7212,10 +8025,12 @@ hardcode_libdirs= dep_rpath= rpath="$finalize_rpath" - test "$mode" != relink && rpath="$compile_rpath$rpath" + test "$opt_mode" != relink && rpath="$compile_rpath$rpath" for libdir in $rpath; do if test -n "$hardcode_libdir_flag_spec"; then if test -n "$hardcode_libdir_separator"; then + func_replace_sysroot "$libdir" + libdir=$func_replace_sysroot_result if test -z "$hardcode_libdirs"; then hardcode_libdirs="$libdir" else @@ -7224,18 +8039,18 @@ *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) ;; *) - hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir" + func_append hardcode_libdirs "$hardcode_libdir_separator$libdir" ;; esac fi else eval flag=\"$hardcode_libdir_flag_spec\" - dep_rpath="$dep_rpath $flag" + func_append dep_rpath " $flag" fi elif test -n "$runpath_var"; then case "$perm_rpath " in *" $libdir 
"*) ;; - *) perm_rpath="$perm_rpath $libdir" ;; + *) func_apped perm_rpath " $libdir" ;; esac fi done @@ -7253,7 +8068,7 @@ # We should set the runpath_var. rpath= for dir in $perm_rpath; do - rpath="$rpath$dir:" + func_append rpath "$dir:" done eval "$runpath_var='$rpath\$$runpath_var'; export $runpath_var" fi @@ -7261,7 +8076,7 @@ fi shlibpath="$finalize_shlibpath" - test "$mode" != relink && shlibpath="$compile_shlibpath$shlibpath" + test "$opt_mode" != relink && shlibpath="$compile_shlibpath$shlibpath" if test -n "$shlibpath"; then eval "$shlibpath_var='$shlibpath\$$shlibpath_var'; export $shlibpath_var" fi @@ -7287,7 +8102,7 @@ linknames= for link do - linknames="$linknames $link" + func_append linknames " $link" done # Use standard objects if they are pic @@ -7298,7 +8113,7 @@ if test -n "$export_symbols" && test -n "$include_expsyms"; then $opt_dry_run || cp "$export_symbols" "$output_objdir/$libname.uexp" export_symbols="$output_objdir/$libname.uexp" - delfiles="$delfiles $export_symbols" + func_append delfiles " $export_symbols" fi orig_export_symbols= @@ -7329,13 +8144,45 @@ $opt_dry_run || $RM $export_symbols cmds=$export_symbols_cmds save_ifs="$IFS"; IFS='~' - for cmd in $cmds; do + for cmd1 in $cmds; do IFS="$save_ifs" - eval cmd=\"$cmd\" - func_len " $cmd" - len=$func_len_result - if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then + # Take the normal branch if the nm_file_list_spec branch + # doesn't work or if tool conversion is not needed. + case $nm_file_list_spec~$to_tool_file_cmd in + *~func_convert_file_noop | *~func_convert_file_msys_to_w32 | ~*) + try_normal_branch=yes + eval cmd=\"$cmd1\" + func_len " $cmd" + len=$func_len_result + ;; + *) + try_normal_branch=no + ;; + esac + if test "$try_normal_branch" = yes \ + && { test "$len" -lt "$max_cmd_len" \ + || test "$max_cmd_len" -le -1; } + then + func_show_eval "$cmd" 'exit $?' 
+ skipped_export=false + elif test -n "$nm_file_list_spec"; then + func_basename "$output" + output_la=$func_basename_result + save_libobjs=$libobjs + save_output=$output + output=${output_objdir}/${output_la}.nm + func_to_tool_file "$output" + libobjs=$nm_file_list_spec$func_to_tool_file_result + func_append delfiles " $output" + func_verbose "creating $NM input file list: $output" + for obj in $save_libobjs; do + func_to_tool_file "$obj" + $ECHO "$func_to_tool_file_result" + done > "$output" + eval cmd=\"$cmd1\" func_show_eval "$cmd" 'exit $?' + output=$save_output + libobjs=$save_libobjs skipped_export=false else # The command line is too long to execute in one step. @@ -7369,7 +8216,7 @@ # global variables. join(1) would be nice here, but unfortunately # isn't a blessed tool. $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter - delfiles="$delfiles $export_symbols $output_objdir/$libname.filter" + func_append delfiles " $export_symbols $output_objdir/$libname.filter" export_symbols=$output_objdir/$libname.def $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols fi @@ -7379,7 +8226,7 @@ case " $convenience " in *" $test_deplib "*) ;; *) - tmp_deplibs="$tmp_deplibs $test_deplib" + func_append tmp_deplibs " $test_deplib" ;; esac done @@ -7399,21 +8246,21 @@ test "X$libobjs" = "X " && libobjs= else gentop="$output_objdir/${outputname}x" - generated="$generated $gentop" + func_append generated " $gentop" func_extract_archives $gentop $convenience - libobjs="$libobjs $func_extract_archives_result" + func_append libobjs " $func_extract_archives_result" test "X$libobjs" = "X " && libobjs= fi fi if test "$thread_safe" = yes && test -n "$thread_safe_flag_spec"; then eval flag=\"$thread_safe_flag_spec\" - linker_flags="$linker_flags $flag" + func_append linker_flags " $flag" fi # Make a backup of the uninstalled library when relinking - if test "$mode" = 
relink; then + if test "$opt_mode" = relink; then $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}U && $MV $realname ${realname}U)' || exit $? fi @@ -7475,10 +8322,13 @@ echo 'INPUT (' > $output for obj in $save_libobjs do - $ECHO "$obj" >> $output + func_to_tool_file "$obj" + $ECHO "$func_to_tool_file_result" >> $output done echo ')' >> $output - delfiles="$delfiles $output" + func_append delfiles " $output" + func_to_tool_file "$output" + output=$func_to_tool_file_result elif test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "X$file_list_spec" != X; then output=${output_objdir}/${output_la}.lnk func_verbose "creating linker input file list: $output" @@ -7492,10 +8342,12 @@ fi for obj do - $ECHO "$obj" >> $output + func_to_tool_file "$obj" + $ECHO "$func_to_tool_file_result" >> $output done - delfiles="$delfiles $output" - output=$firstobj\"$file_list_spec$output\" + func_append delfiles " $output" + func_to_tool_file "$output" + output=$firstobj\"$file_list_spec$func_to_tool_file_result\" else if test -n "$save_libobjs"; then func_verbose "creating reloadable object files..." @@ -7546,7 +8398,7 @@ if test -n "$last_robj"; then eval concat_cmds=\"\${concat_cmds}~\$RM $last_robj\" fi - delfiles="$delfiles $output" + func_append delfiles " $output" else output= @@ -7580,7 +8432,7 @@ lt_exit=$? # Restore the uninstalled library and exit - if test "$mode" = relink; then + if test "$opt_mode" = relink; then ( cd "$output_objdir" && \ $RM "${realname}T" && \ $MV "${realname}U" "$realname" ) @@ -7613,7 +8465,7 @@ # global variables. join(1) would be nice here, but unfortunately # isn't a blessed tool. 
$opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter - delfiles="$delfiles $export_symbols $output_objdir/$libname.filter" + func_append delfiles " $export_symbols $output_objdir/$libname.filter" export_symbols=$output_objdir/$libname.def $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols fi @@ -7654,10 +8506,10 @@ # Add any objects from preloaded convenience libraries if test -n "$dlprefiles"; then gentop="$output_objdir/${outputname}x" - generated="$generated $gentop" + func_append generated " $gentop" func_extract_archives $gentop $dlprefiles - libobjs="$libobjs $func_extract_archives_result" + func_append libobjs " $func_extract_archives_result" test "X$libobjs" = "X " && libobjs= fi @@ -7673,7 +8525,7 @@ lt_exit=$? # Restore the uninstalled library and exit - if test "$mode" = relink; then + if test "$opt_mode" = relink; then ( cd "$output_objdir" && \ $RM "${realname}T" && \ $MV "${realname}U" "$realname" ) @@ -7685,7 +8537,7 @@ IFS="$save_ifs" # Restore the uninstalled library and exit - if test "$mode" = relink; then + if test "$opt_mode" = relink; then $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}T && $MV $realname ${realname}T && $MV ${realname}U $realname)' || exit $? if test -n "$convenience"; then @@ -7769,13 +8621,16 @@ reload_conv_objs=$reload_objs\ `$ECHO "$tmp_whole_archive_flags" | $SED 's|,| |g'` else gentop="$output_objdir/${obj}x" - generated="$generated $gentop" + func_append generated " $gentop" func_extract_archives $gentop $convenience reload_conv_objs="$reload_objs $func_extract_archives_result" fi fi + # If we're not building shared, we need to use non_pic_objs + test "$build_libtool_libs" != yes && libobjs="$non_pic_objects" + # Create the old-style object. 
reload_objs="$objs$old_deplibs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; /\.lib$/d; $lo2o" | $NL2SP`" $reload_conv_objs" ### testsuite: skip nested quoting test @@ -7849,8 +8704,8 @@ if test "$tagname" = CXX ; then case ${MACOSX_DEPLOYMENT_TARGET-10.0} in 10.[0123]) - compile_command="$compile_command ${wl}-bind_at_load" - finalize_command="$finalize_command ${wl}-bind_at_load" + func_append compile_command " ${wl}-bind_at_load" + func_append finalize_command " ${wl}-bind_at_load" ;; esac fi @@ -7870,7 +8725,7 @@ *) case " $compile_deplibs " in *" -L$path/$objdir "*) - new_libs="$new_libs -L$path/$objdir" ;; + func_append new_libs " -L$path/$objdir" ;; esac ;; esac @@ -7880,17 +8735,17 @@ -L*) case " $new_libs " in *" $deplib "*) ;; - *) new_libs="$new_libs $deplib" ;; + *) func_append new_libs " $deplib" ;; esac ;; - *) new_libs="$new_libs $deplib" ;; + *) func_append new_libs " $deplib" ;; esac done compile_deplibs="$new_libs" - compile_command="$compile_command $compile_deplibs" - finalize_command="$finalize_command $finalize_deplibs" + func_append compile_command " $compile_deplibs" + func_append finalize_command " $finalize_deplibs" if test -n "$rpath$xrpath"; then # If the user specified any rpath flags, then add them. @@ -7898,7 +8753,7 @@ # This is the magic to use -rpath. 
case "$finalize_rpath " in *" $libdir "*) ;; - *) finalize_rpath="$finalize_rpath $libdir" ;; + *) func_append finalize_rpath " $libdir" ;; esac done fi @@ -7917,18 +8772,18 @@ *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) ;; *) - hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir" + func_append hardcode_libdirs "$hardcode_libdir_separator$libdir" ;; esac fi else eval flag=\"$hardcode_libdir_flag_spec\" - rpath="$rpath $flag" + func_append rpath " $flag" fi elif test -n "$runpath_var"; then case "$perm_rpath " in *" $libdir "*) ;; - *) perm_rpath="$perm_rpath $libdir" ;; + *) func_append perm_rpath " $libdir" ;; esac fi case $host in @@ -7937,12 +8792,12 @@ case :$dllsearchpath: in *":$libdir:"*) ;; ::) dllsearchpath=$libdir;; - *) dllsearchpath="$dllsearchpath:$libdir";; + *) func_append dllsearchpath ":$libdir";; esac case :$dllsearchpath: in *":$testbindir:"*) ;; ::) dllsearchpath=$testbindir;; - *) dllsearchpath="$dllsearchpath:$testbindir";; + *) func_append dllsearchpath ":$testbindir";; esac ;; esac @@ -7968,18 +8823,18 @@ *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) ;; *) - hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir" + func_append hardcode_libdirs "$hardcode_libdir_separator$libdir" ;; esac fi else eval flag=\"$hardcode_libdir_flag_spec\" - rpath="$rpath $flag" + func_append rpath " $flag" fi elif test -n "$runpath_var"; then case "$finalize_perm_rpath " in *" $libdir "*) ;; - *) finalize_perm_rpath="$finalize_perm_rpath $libdir" ;; + *) func_append finalize_perm_rpath " $libdir" ;; esac fi done @@ -8030,6 +8885,12 @@ exit_status=0 func_show_eval "$link_command" 'exit_status=$?' + if test -n "$postlink_cmds"; then + func_to_tool_file "$output" + postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'` + func_execute_cmds "$postlink_cmds" 'exit $?' + fi + # Delete the generated files. 
if test -f "$output_objdir/${outputname}S.${objext}"; then func_show_eval '$RM "$output_objdir/${outputname}S.${objext}"' @@ -8052,7 +8913,7 @@ # We should set the runpath_var. rpath= for dir in $perm_rpath; do - rpath="$rpath$dir:" + func_append rpath "$dir:" done compile_var="$runpath_var=\"$rpath\$$runpath_var\" " fi @@ -8060,7 +8921,7 @@ # We should set the runpath_var. rpath= for dir in $finalize_perm_rpath; do - rpath="$rpath$dir:" + func_append rpath "$dir:" done finalize_var="$runpath_var=\"$rpath\$$runpath_var\" " fi @@ -8075,6 +8936,13 @@ $opt_dry_run || $RM $output # Link the executable and exit func_show_eval "$link_command" 'exit $?' + + if test -n "$postlink_cmds"; then + func_to_tool_file "$output" + postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'` + func_execute_cmds "$postlink_cmds" 'exit $?' + fi + exit $EXIT_SUCCESS fi @@ -8108,6 +8976,12 @@ func_show_eval "$link_command" 'exit $?' + if test -n "$postlink_cmds"; then + func_to_tool_file "$output_objdir/$outputname" + postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output_objdir/$outputname"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'` + func_execute_cmds "$postlink_cmds" 'exit $?' + fi + # Now create the wrapper script. func_verbose "creating $output" @@ -8205,7 +9079,7 @@ else oldobjs="$old_deplibs $non_pic_objects" if test "$preload" = yes && test -f "$symfileobj"; then - oldobjs="$oldobjs $symfileobj" + func_append oldobjs " $symfileobj" fi fi addlibs="$old_convenience" @@ -8213,10 +9087,10 @@ if test -n "$addlibs"; then gentop="$output_objdir/${outputname}x" - generated="$generated $gentop" + func_append generated " $gentop" func_extract_archives $gentop $addlibs - oldobjs="$oldobjs $func_extract_archives_result" + func_append oldobjs " $func_extract_archives_result" fi # Do each command in the archive commands. 
@@ -8227,10 +9101,10 @@ # Add any objects from preloaded convenience libraries if test -n "$dlprefiles"; then gentop="$output_objdir/${outputname}x" - generated="$generated $gentop" + func_append generated " $gentop" func_extract_archives $gentop $dlprefiles - oldobjs="$oldobjs $func_extract_archives_result" + func_append oldobjs " $func_extract_archives_result" fi # POSIX demands no paths to be encoded in archives. We have @@ -8248,7 +9122,7 @@ else echo "copying selected object files to avoid basename conflicts..." gentop="$output_objdir/${outputname}x" - generated="$generated $gentop" + func_append generated " $gentop" func_mkdir_p "$gentop" save_oldobjs=$oldobjs oldobjs= @@ -8272,9 +9146,9 @@ esac done func_show_eval "ln $obj $gentop/$newobj || cp $obj $gentop/$newobj" - oldobjs="$oldobjs $gentop/$newobj" + func_append oldobjs " $gentop/$newobj" ;; - *) oldobjs="$oldobjs $obj" ;; + *) func_append oldobjs " $obj" ;; esac done fi @@ -8284,6 +9158,16 @@ len=$func_len_result if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then cmds=$old_archive_cmds + elif test -n "$archiver_list_spec"; then + func_verbose "using command file archive linking..." + for obj in $oldobjs + do + func_to_tool_file "$obj" + $ECHO "$func_to_tool_file_result" + done > $output_objdir/$libname.libcmd + func_to_tool_file "$output_objdir/$libname.libcmd" + oldobjs=" $archiver_list_spec$func_to_tool_file_result" + cmds=$old_archive_cmds else # the command line is too long to link in one step, link in parts func_verbose "using piecewise archive linking..." 
@@ -8380,9 +9264,19 @@ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib` test -z "$libdir" && \ func_fatal_error "\`$deplib' is not a valid libtool archive" - newdependency_libs="$newdependency_libs $libdir/$name" + func_append newdependency_libs " ${lt_sysroot:+=}$libdir/$name" + ;; + -L*) + func_stripname -L '' "$deplib" + func_replace_sysroot "$func_stripname_result" + func_append newdependency_libs " -L$func_replace_sysroot_result" + ;; + -R*) + func_stripname -R '' "$deplib" + func_replace_sysroot "$func_stripname_result" + func_append newdependency_libs " -R$func_replace_sysroot_result" ;; - *) newdependency_libs="$newdependency_libs $deplib" ;; + *) func_append newdependency_libs " $deplib" ;; esac done dependency_libs="$newdependency_libs" @@ -8396,9 +9290,9 @@ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib` test -z "$libdir" && \ func_fatal_error "\`$lib' is not a valid libtool archive" - newdlfiles="$newdlfiles $libdir/$name" + func_append newdlfiles " ${lt_sysroot:+=}$libdir/$name" ;; - *) newdlfiles="$newdlfiles $lib" ;; + *) func_append newdlfiles " $lib" ;; esac done dlfiles="$newdlfiles" @@ -8415,7 +9309,7 @@ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib` test -z "$libdir" && \ func_fatal_error "\`$lib' is not a valid libtool archive" - newdlprefiles="$newdlprefiles $libdir/$name" + func_append newdlprefiles " ${lt_sysroot:+=}$libdir/$name" ;; esac done @@ -8427,7 +9321,7 @@ [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;; *) abs=`pwd`"/$lib" ;; esac - newdlfiles="$newdlfiles $abs" + func_append newdlfiles " $abs" done dlfiles="$newdlfiles" newdlprefiles= @@ -8436,7 +9330,7 @@ [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;; *) abs=`pwd`"/$lib" ;; esac - newdlprefiles="$newdlprefiles $abs" + func_append newdlprefiles " $abs" done dlprefiles="$newdlprefiles" fi @@ -8521,7 +9415,7 @@ exit $EXIT_SUCCESS } -{ test "$mode" = link || test "$mode" = relink; } && +{ test "$opt_mode" = link || test "$opt_mode" = relink; } && func_mode_link 
${1+"$@"} @@ -8541,9 +9435,9 @@ for arg do case $arg in - -f) RM="$RM $arg"; rmforce=yes ;; - -*) RM="$RM $arg" ;; - *) files="$files $arg" ;; + -f) func_append RM " $arg"; rmforce=yes ;; + -*) func_append RM " $arg" ;; + *) func_append files " $arg" ;; esac done @@ -8552,24 +9446,23 @@ rmdirs= - origobjdir="$objdir" for file in $files; do func_dirname "$file" "" "." dir="$func_dirname_result" if test "X$dir" = X.; then - objdir="$origobjdir" + odir="$objdir" else - objdir="$dir/$origobjdir" + odir="$dir/$objdir" fi func_basename "$file" name="$func_basename_result" - test "$mode" = uninstall && objdir="$dir" + test "$opt_mode" = uninstall && odir="$dir" - # Remember objdir for removal later, being careful to avoid duplicates - if test "$mode" = clean; then + # Remember odir for removal later, being careful to avoid duplicates + if test "$opt_mode" = clean; then case " $rmdirs " in - *" $objdir "*) ;; - *) rmdirs="$rmdirs $objdir" ;; + *" $odir "*) ;; + *) func_append rmdirs " $odir" ;; esac fi @@ -8595,18 +9488,17 @@ # Delete the libtool libraries and symlinks. for n in $library_names; do - rmfiles="$rmfiles $objdir/$n" + func_append rmfiles " $odir/$n" done - test -n "$old_library" && rmfiles="$rmfiles $objdir/$old_library" + test -n "$old_library" && func_append rmfiles " $odir/$old_library" - case "$mode" in + case "$opt_mode" in clean) - case " $library_names " in - # " " in the beginning catches empty $dlname + case " $library_names " in *" $dlname "*) ;; - *) rmfiles="$rmfiles $objdir/$dlname" ;; + *) test -n "$dlname" && func_append rmfiles " $odir/$dlname" ;; esac - test -n "$libdir" && rmfiles="$rmfiles $objdir/$name $objdir/${name}i" + test -n "$libdir" && func_append rmfiles " $odir/$name $odir/${name}i" ;; uninstall) if test -n "$library_names"; then @@ -8634,19 +9526,19 @@ # Add PIC object to the list of files to remove. 
if test -n "$pic_object" && test "$pic_object" != none; then - rmfiles="$rmfiles $dir/$pic_object" + func_append rmfiles " $dir/$pic_object" fi # Add non-PIC object to the list of files to remove. if test -n "$non_pic_object" && test "$non_pic_object" != none; then - rmfiles="$rmfiles $dir/$non_pic_object" + func_append rmfiles " $dir/$non_pic_object" fi fi ;; *) - if test "$mode" = clean ; then + if test "$opt_mode" = clean ; then noexename=$name case $file in *.exe) @@ -8656,7 +9548,7 @@ noexename=$func_stripname_result # $file with .exe has already been added to rmfiles, # add $file without .exe - rmfiles="$rmfiles $file" + func_append rmfiles " $file" ;; esac # Do a test to see if this is a libtool program. @@ -8665,7 +9557,7 @@ func_ltwrapper_scriptname "$file" relink_command= func_source $func_ltwrapper_scriptname_result - rmfiles="$rmfiles $func_ltwrapper_scriptname_result" + func_append rmfiles " $func_ltwrapper_scriptname_result" else relink_command= func_source $dir/$noexename @@ -8673,12 +9565,12 @@ # note $name still contains .exe if it was in $file originally # as does the version of $file that was added into $rmfiles - rmfiles="$rmfiles $objdir/$name $objdir/${name}S.${objext}" + func_append rmfiles " $odir/$name $odir/${name}S.${objext}" if test "$fast_install" = yes && test -n "$relink_command"; then - rmfiles="$rmfiles $objdir/lt-$name" + func_append rmfiles " $odir/lt-$name" fi if test "X$noexename" != "X$name" ; then - rmfiles="$rmfiles $objdir/lt-${noexename}.c" + func_append rmfiles " $odir/lt-${noexename}.c" fi fi fi @@ -8686,7 +9578,6 @@ esac func_show_eval "$RM $rmfiles" 'exit_status=1' done - objdir="$origobjdir" # Try to remove the ${objdir}s in the directories where we deleted files for dir in $rmdirs; do @@ -8698,16 +9589,16 @@ exit $exit_status } -{ test "$mode" = uninstall || test "$mode" = clean; } && +{ test "$opt_mode" = uninstall || test "$opt_mode" = clean; } && func_mode_uninstall ${1+"$@"} -test -z "$mode" && { +test -z 
"$opt_mode" && { help="$generic_help" func_fatal_help "you must specify a MODE" } test -z "$exec_cmd" && \ - func_fatal_help "invalid operation mode \`$mode'" + func_fatal_help "invalid operation mode \`$opt_mode'" if test -n "$exec_cmd"; then eval exec "$exec_cmd" diff -Nru couchdb-1.2.0/build-aux/missing couchdb-1.4.0~rc.1/build-aux/missing --- couchdb-1.2.0/build-aux/missing 2011-11-03 17:56:27.000000000 -0400 +++ couchdb-1.4.0~rc.1/build-aux/missing 2012-09-03 11:10:15.000000000 -0400 @@ -1,10 +1,10 @@ #! /bin/sh # Common stub for a few missing GNU programs while installing. -scriptversion=2006-05-10.23 +scriptversion=2012-01-06.13; # UTC -# Copyright (C) 1996, 1997, 1999, 2000, 2002, 2003, 2004, 2005, 2006 -# Free Software Foundation, Inc. +# Copyright (C) 1996, 1997, 1999, 2000, 2002, 2003, 2004, 2005, 2006, +# 2008, 2009, 2010, 2011, 2012 Free Software Foundation, Inc. # Originally by Fran,cois Pinard , 1996. # This program is free software; you can redistribute it and/or modify @@ -18,9 +18,7 @@ # GNU General Public License for more details. # You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA -# 02110-1301, USA. +# along with this program. If not, see . # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a @@ -86,9 +84,11 @@ help2man touch the output file lex create \`lex.yy.c', if possible, from existing .c makeinfo touch the output file - tar try tar, gnutar, gtar, then tar without non-portable flags yacc create \`y.tab.[ch]', if possible, from existing .[ch] +Version suffixes to PROGRAM as well as the prefixes \`gnu-', \`gnu', and +\`g' are ignored when checking the name. + Send bug reports to ." exit $? ;; @@ -106,23 +106,21 @@ esac +# normalize program name to check for. 
+program=`echo "$1" | sed ' + s/^gnu-//; t + s/^gnu//; t + s/^g//; t'` + # Now exit if we have it, but it failed. Also exit now if we # don't have it and --version was passed (most likely to detect -# the program). +# the program). This is about non-GNU programs, so use $1 not +# $program. case $1 in - lex|yacc) + lex*|yacc*) # Not GNU programs, they don't have --version. ;; - tar) - if test -n "$run"; then - echo 1>&2 "ERROR: \`tar' requires --run" - exit 1 - elif test "x$2" = "x--version" || test "x$2" = "x--help"; then - exit 1 - fi - ;; - *) if test -z "$run" && ($1 --version) > /dev/null 2>&1; then # We have it, but it failed. @@ -138,7 +136,7 @@ # If it does not exist, or fails to run (possibly an outdated version), # try to emulate it. -case $1 in +case $program in aclocal*) echo 1>&2 "\ WARNING: \`$1' is $msg. You should only need it if @@ -148,7 +146,7 @@ touch aclocal.m4 ;; - autoconf) + autoconf*) echo 1>&2 "\ WARNING: \`$1' is $msg. You should only need it if you modified \`${configure_ac}'. You might want to install the @@ -157,7 +155,7 @@ touch configure ;; - autoheader) + autoheader*) echo 1>&2 "\ WARNING: \`$1' is $msg. You should only need it if you modified \`acconfig.h' or \`${configure_ac}'. You might want @@ -187,7 +185,7 @@ while read f; do touch "$f"; done ;; - autom4te) + autom4te*) echo 1>&2 "\ WARNING: \`$1' is needed, but is $msg. You might have modified some files without having the @@ -210,7 +208,7 @@ fi ;; - bison|yacc) + bison*|yacc*) echo 1>&2 "\ WARNING: \`$1' $msg. You should only need it if you modified a \`.y' file. You may need the \`Bison' package @@ -218,7 +216,7 @@ \`Bison' from any GNU archive site." rm -f y.tab.c y.tab.h if test $# -ne 1; then - eval LASTARG="\${$#}" + eval LASTARG=\${$#} case $LASTARG in *.y) SRCFILE=`echo "$LASTARG" | sed 's/y$/c/'` @@ -240,7 +238,7 @@ fi ;; - lex|flex) + lex*|flex*) echo 1>&2 "\ WARNING: \`$1' is $msg. You should only need it if you modified a \`.l' file. 
You may need the \`Flex' package @@ -248,7 +246,7 @@ \`Flex' from any GNU archive site." rm -f lex.yy.c if test $# -ne 1; then - eval LASTARG="\${$#}" + eval LASTARG=\${$#} case $LASTARG in *.l) SRCFILE=`echo "$LASTARG" | sed 's/l$/c/'` @@ -263,7 +261,7 @@ fi ;; - help2man) + help2man*) echo 1>&2 "\ WARNING: \`$1' is $msg. You should only need it if you modified a dependency of a manual page. You may need the @@ -277,11 +275,11 @@ else test -z "$file" || exec >$file echo ".ab help2man is required to generate this page" - exit 1 + exit $? fi ;; - makeinfo) + makeinfo*) echo 1>&2 "\ WARNING: \`$1' is $msg. You should only need it if you modified a \`.texi' or \`.texinfo' file, or any other file @@ -310,41 +308,6 @@ touch $file ;; - tar) - shift - - # We have already tried tar in the generic part. - # Look for gnutar/gtar before invocation to avoid ugly error - # messages. - if (gnutar --version > /dev/null 2>&1); then - gnutar "$@" && exit 0 - fi - if (gtar --version > /dev/null 2>&1); then - gtar "$@" && exit 0 - fi - firstarg="$1" - if shift; then - case $firstarg in - *o*) - firstarg=`echo "$firstarg" | sed s/o//` - tar "$firstarg" "$@" && exit 0 - ;; - esac - case $firstarg in - *h*) - firstarg=`echo "$firstarg" | sed s/h//` - tar "$firstarg" "$@" && exit 0 - ;; - esac - fi - - echo 1>&2 "\ -WARNING: I can't seem to be able to run \`tar' with the given arguments. - You may want to install GNU tar or Free paxutils, or check the - command line arguments." - exit 1 - ;; - *) echo 1>&2 "\ WARNING: \`$1' is needed, and is $msg. 
@@ -363,5 +326,6 @@ # eval: (add-hook 'write-file-hooks 'time-stamp) # time-stamp-start: "scriptversion=" # time-stamp-format: "%:y-%02m-%02d.%02H" -# time-stamp-end: "$" +# time-stamp-time-zone: "UTC" +# time-stamp-end: "; # UTC" # End: diff -Nru couchdb-1.2.0/build-aux/sphinx-build couchdb-1.4.0~rc.1/build-aux/sphinx-build --- couchdb-1.2.0/build-aux/sphinx-build 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/build-aux/sphinx-build 2013-08-23 10:57:21.000000000 -0400 @@ -0,0 +1,67 @@ +#!/bin/sh -e + +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. + +# This script is called by the build system and is used to call sphinx-build if +# is is available, or alternatively, emit a warning, and perform a no-op. Any +# required directories or Makefiles are created and stubbed out as appropriate. + +if test -z "`which sphinx-build`"; then + missing=yes + cat << EOF +WARNING: 'sphinx-build' is needed, and is missing on your system. + You might have modified some files without having the + proper tools for further handling them. +EOF +fi + +if test "$2" = "texinfo"; then + if test -z "`which makeinfo`"; then + missing=yes + cat << EOF +WARNING: 'makeinfo' is needed, and is missing on your system. + You might have modified some files without having the + proper tools for further handling them. 
+EOF + fi + if test "$missing" != "yes"; then + sphinx-build $* + else + mkdir -p texinfo + echo "info:" > texinfo/Makefile + fi +fi + +if test "$2" = "latex"; then + if test -z "`which pdflatex`"; then + missing=yes + cat << EOF +WARNING: 'pdflatex' is needed, and is missing on your system. + You might have modified some files without having the + proper tools for further handling them. +EOF + fi + if test "$missing" != "yes"; then + sphinx-build $* + else + mkdir -p latex + echo "all-pdf:" > latex/Makefile + fi +fi +if test "$2" = "html"; then + if test "$missing" != "yes"; then + sphinx-build $* + else + mkdir -p html + fi +fi diff -Nru couchdb-1.2.0/build-aux/sphinx-touch couchdb-1.4.0~rc.1/build-aux/sphinx-touch --- couchdb-1.2.0/build-aux/sphinx-touch 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/build-aux/sphinx-touch 2013-08-23 10:57:21.000000000 -0400 @@ -0,0 +1,24 @@ +#!/bin/sh -e + +# Licensed under the Apache License, Version 2.0 (the "License"); you may not +# use this file except in compliance with the License. You may obtain a copy of +# the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations under +# the License. + +# This script is called by the build system and is used to touch the list of +# expected output files when sphinx-build is not available. If the files exist, +# this will satisfy make. If they do not exist, we create of empty files. 
+ +if test -z "`which sphinx-build`"; then + for file in $*; do + mkdir -p `dirname $file` + touch $file + done +fi \ No newline at end of file diff -Nru couchdb-1.2.0/CHANGES couchdb-1.4.0~rc.1/CHANGES --- couchdb-1.2.0/CHANGES 2012-03-28 15:34:12.000000000 -0400 +++ couchdb-1.4.0~rc.1/CHANGES 1969-12-31 19:00:00.000000000 -0500 @@ -1,874 +0,0 @@ -Apache CouchDB CHANGES -====================== - -Version 1.2.0 -------------- - -Authentication: - - * Fix use of OAuth with VHosts and URL rewriting. - * OAuth secrets can now be stored in the users system database - as an alternative to key value pairs in the .ini configuration. - By default this is disabled (secrets are stored in the .ini) - but can be enabled via the .ini configuration key `use_users_db` - in the `couch_httpd_oauth` section. - * Documents in the _users database are no longer publicly - readable. - * Confidential information in the _replication database is no - longer publicly readable. - * Password hashes are now calculated by CouchDB. Clients are no - longer required to do this manually. - * Cookies used for authentication can be made persistent by enabling - the .ini configuration key `allow_persistent_cookies' in the - `couch_httpd_auth` section. - -Build System: - - * cURL is no longer required to build CouchDB as it is only - used by the command line JS test runner. If cURL is available - when building CouchJS you can enable the HTTP bindings by - passing -H on the command line. - * Temporarily made `make check` pass with R15B. A more thorough - fix is in the works (COUCHDB-1424). - * Fixed --with-js-include and --with-js-lib options. - * Added --with-js-lib-name option. - -HTTP Interface: - - * Added a native JSON parser. - * The _active_tasks API now offers more granular fields. Each - task type is now able to expose different properties. - * Added built-in changes feed filter `_view`. 
- * Fixes to the `_changes` feed heartbeat option which caused - heartbeats to be missed when used with a filter. This caused - timeouts of continuous pull replications with a filter. - * Properly restart the SSL socket on configuration changes. - -Replicator: - - * A new replicator implementation. It offers more performance and - configuration options. - * Passing non-string values to query_params is now a 400 bad - request. This is to reduce the surprise that all parameters - are converted to strings internally. - * Added optional field `since_seq` to replication objects/documents. - It allows to bootstrap a replication from a specific source sequence - number. - * Simpler replication cancellation. In addition to the current method, - replications can now be canceled by specifying the replication ID - instead of the original replication object/document. - -Storage System: - - * Added optional database and view index file compression (using Google's - snappy or zlib's deflate). This feature is enabled by default, but it - can be disabled by adapting local.ini accordingly. The on-disk format - is upgraded on compaction and new DB/view creation to support this. - * Several performance improvements, most notably regarding database writes - and view indexing. - * Computation of the size of the latest MVCC snapshot data and all its - supporting metadata, both for database and view index files. This - information is exposed as the `data_size` attribute in the database and - view group information URIs. - * The size of the buffers used for database and view compaction is now - configurable. - * Added support for automatic database and view compaction. This feature - is disabled by default, but it can be enabled via the .ini configuration. - * Performance improvements for the built-in changes feed filters `_doc_ids` - and `_design'. - -View Server: - - * Add CoffeeScript (http://coffeescript.org/) as a first class view server - language. 
- * Fixed old index file descriptor leaks after a view cleanup. - * The requested_path property keeps the pre-rewrite path even when no VHost - configuration is matched. - * Fixed incorrect reduce query results when using pagination parameters. - * Made icu_driver work with Erlang R15B and later. - * Avoid invalidating view indexes when running out of file descriptors. - -OAuth: - - * Updated bundled erlang_oauth library to the latest version. - -Futon: - - * The `Status` screen (active tasks) now displays two new task status - fields: `Started on` and `Updated on`. - * Futon remembers view code every time it is saved, allowing to save an - edit that amounts to a revert. - -Log System: - - * Log correct stacktrace in all cases. - * Improvements to log messages for file-related errors. - -Version 1.1.1 -------------- - -* Support SpiderMonkey 1.8.5 -* Add configurable maximum to the number of bytes returned by _log. -* Allow CommonJS modules to be an empty string. -* Bump minimum Erlang version to R13B02. -* Do not run deleted validate_doc_update functions. -* ETags for views include current sequence if include_docs=true. -* Fix bug where duplicates can appear in _changes feed. -* Fix bug where update handlers break after conflict resolution. -* Fix bug with _replicator where include "filter" could crash couch. -* Fix crashes when compacting large views. -* Fix file descriptor leak in _log -* Fix missing revisions in _changes?style=all_docs. -* Improve handling of compaction at max_dbs_open limit. -* JSONP responses now send "text/javascript" for Content-Type. -* Link to ICU 4.2 on Windows. -* Permit forward slashes in path to update functions. -* Reap couchjs processes that hit reduce_overflow error. -* Status code can be specified in update handlers. -* Support provides() in show functions. -* _view_cleanup when ddoc has no views now removes all index files. -* max_replication_retry_count now supports "infinity". 
-* Fix replication crash when source database has a document with empty ID. -* Fix deadlock when assigning couchjs processes to serve requests. -* Fixes to the document multipart PUT API. -* Fixes regarding file descriptor leaks for databases with views. - -Version 1.1.0 -------------- - -All CHANGES for 1.0.2 and 1.0.3 also apply to 1.1.0. - -HTTP Interface: - - * Native SSL support. - * Added support for HTTP range requests for attachments. - * Added built-in filters for `_changes`: `_doc_ids` and `_design`. - * Added configuration option for TCP_NODELAY aka "Nagle". - * Allow POSTing arguments to `_changes`. - * Allow `keys` parameter for GET requests to views. - * Allow wildcards in vhosts definitions. - * More granular ETag support for views. - * More flexible URL rewriter. - * Added support for recognizing "Q values" and media parameters in - HTTP Accept headers. - * Validate doc ids that come from a PUT to a URL. - -Externals: - - * Added OS Process module to manage daemons outside of CouchDB. - * Added HTTP Proxy handler for more scalable externals. - -Replicator: - - * Added `_replicator` database to manage replications. - * Fixed issues when an endpoint is a remote database accessible via SSL. - * Added support for continuous by-doc-IDs replication. - * Fix issue where revision info was omitted when replicating attachments. - * Integrity of attachment replication is now verified by MD5. - -Storage System: - - * Multiple micro-optimizations when reading data. - -View Server: - - * Added CommonJS support to map functions. - * Added `stale=update_after` query option that triggers a view update after - returning a `stale=ok` response. - * Warn about empty result caused by `startkey` and `endkey` limiting. - * Built-in reduce function `_sum` now accepts lists of integers as input. - * Added view query aliases start_key, end_key, start_key_doc_id and - end_key_doc_id. - -Futon: - - * Added a "change password"-feature to Futon. 
- -URL Rewriter & Vhosts: - - * Fix for variable substituion - -Version 1.0.3 -------------- - -General: - - * Fixed compatibility issues with Erlang R14B02. - -HTTP Interface: - - * Fix bug that allows invalid UTF-8 after valid escapes. - * The query parameter `include_docs` now honors the parameter `conflicts`. - This applies to queries against map views, _all_docs and _changes. - * Added support for inclusive_end with reduce views. - -Storage System: - - * More performant queries against _changes and _all_docs when using the - `include_docs` parameter. - -Replicator: - - * Enabled replication over IPv6. - * Fixed for crashes in continuous and filtered changes feeds. - * Fixed error when restarting replications in OTP R14B02. - * Upgrade ibrowse to version 2.2.0. - * Fixed bug when using a filter and a limit of 1. - -Security: - - * Fixed OAuth signature computation in OTP R14B02. - * Handle passwords with : in them. - -Futon: - - * Made compatible with jQuery 1.5.x. - -Etap Test Suite: - - * Etap tests no longer require use of port 5984. They now use a randomly - selected port so they won't clash with a running CouchDB. - -Windows: - - * Windows builds now require ICU >= 4.4.0 and Erlang >= R14B03. See - COUCHDB-1152, and COUCHDB-963 + OTP-9139 for more information. - -Version 1.0.2 -------------- - -Futon: - - * Make test suite work with Safari and Chrome. - * Fixed animated progress spinner. - * Fix raw view document link due to overzealous URI encoding. - * Spell javascript correctly in loadScript(uri). - -Storage System: - - * Fix leaking file handles after compacting databases and views. - * Fix databases forgetting their validation function after compaction. - * Fix occasional timeout errors after successfully compacting large databases. - * Fix ocassional error when writing to a database that has just been compacted. - * Fix occasional timeout errors on systems with slow or heavily loaded IO. 
- * Fix for OOME when compactions include documents with many conflicts. - * Fix for missing attachment compression when MIME types included parameters. - * Preserve purge metadata during compaction to avoid spurious view rebuilds. - * Fix spurious conflicts introduced when uploading an attachment after - a doc has been in a conflict. See COUCHDB-902 for details. - * Fix for frequently edited documents in multi-master deployments being - duplicated in _changes and _all_docs. See COUCHDDB-968 for details on how - to repair. - * Significantly higher read and write throughput against database and - view index files. - -Log System: - - * Reduce lengthy stack traces. - * Allow logging of native types. - -HTTP Interface: - - * Allow reduce=false parameter in map-only views. - * Fix parsing of Accept headers. - * Fix for multipart GET APIs when an attachment was created during a - local-local replication. See COUCHDB-1022 for details. - -Replicator: - - * Updated ibrowse library to 2.1.2 fixing numerous replication issues. - * Make sure that the replicator respects HTTP settings defined in the config. - * Fix error when the ibrowse connection closes unexpectedly. - * Fix authenticated replication (with HTTP basic auth) of design documents - with attachments. - * Various fixes to make replication more resilient for edge-cases. - -View Server: - - * Don't trigger view updates when requesting `_design/doc/_info`. - * Fix for circular references in CommonJS requires. - * Made isArray() function available to functions executed in the query server. - * Documents are now sealed before being passed to map functions. - * Force view compaction failure when duplicated document data exists. When - this error is seen in the logs users should rebuild their views from - scratch to fix the issue. See COUCHDB-999 for details. - -Version 1.0.1 -------------- - -Storage System: - - * Fix data corruption bug COUCHDB-844. Please see - http://couchdb.apache.org/notice/1.0.1.html for details. 
- -Replicator: - - * Added support for replication via an HTTP/HTTPS proxy. - * Fix pull replication of attachments from 0.11 to 1.0.x. - * Make the _changes feed work with non-integer seqnums. - -HTTP Interface: - - * Expose `committed_update_seq` for monitoring purposes. - * Show fields saved along with _deleted=true. Allows for auditing of deletes. - * More robust Accept-header detection. - -Authentication: - - * Enable basic-auth popup when required to access the server, to prevent - people from getting locked out. - -Futon: - - * User interface element for querying stale (cached) views. - -Build and System Integration: - - * Included additional source files for distribution. - -Version 1.0.0 -------------- - -Security: - - * Added authentication caching, to avoid repeated opening and closing of the - users database for each request requiring authentication. - -Storage System: - - * Small optimization for reordering result lists. - * More efficient header commits. - * Use O_APPEND to save lseeks. - * Faster implementation of pread_iolist(). Further improves performance on - concurrent reads. - -View Server: - - * Faster default view collation. - * Added option to include update_seq in view responses. - -Version 0.11.2 --------------- - -Replicator: - - * Fix bug when pushing design docs by non-admins, which was hanging the - replicator for no good reason. - * Fix bug when pulling design documents from a source that requires - basic-auth. - -HTTP Interface: - - * Better error messages on invalid URL requests. - -Authentication: - - * User documents can now be deleted by admins or the user. - -Security: - - * Avoid potential DOS attack by guarding all creation of atoms. - -Futon: - - * Add some Futon files that were missing from the Makefile. - -Version 0.11.1 --------------- - -HTTP Interface: - - * Mask passwords in active tasks and logging. - * Update mochijson2 to allow output of BigNums not in float form. - * Added support for X-HTTP-METHOD-OVERRIDE. 
- * Better error message for database names. - * Disable jsonp by default. - * Accept gzip encoded standalone attachments. - * Made max_concurrent_connections configurable. - * Made changes API more robust. - * Send newly generated document rev to callers of an update function. - -Futon: - - * Use "expando links" for over-long document values in Futon. - * Added continuous replication option. - * Added option to replicating test results anonymously to a community - CouchDB instance. - * Allow creation and deletion of config entries. - * Fixed display issues with doc ids that have escaped characters. - * Fixed various UI issues. - -Build and System Integration: - - * Output of `couchdb --help` has been improved. - * Fixed compatibility with the Erlang R14 series. - * Fixed warnings on Linux builds. - * Fixed build error when aclocal needs to be called during the build. - * Require ICU 4.3.1. - * Fixed compatibility with Solaris. - -Security: - - * Added authentication redirect URL to log in clients. - * Fixed query parameter encoding issue in oauth.js. - * Made authentication timeout configurable. - * Temporary views are now admin-only resources. - -Storage System: - - * Don't require a revpos for attachment stubs. - * Added checking to ensure when a revpos is sent with an attachment stub, - it's correct. - * Make file deletions async to avoid pauses during compaction and db - deletion. - * Fixed for wrong offset when writing headers and converting them to blocks, - only triggered when header is larger than 4k. - * Preserve _revs_limit and instance_start_time after compaction. - -Configuration System: - - * Fixed timeout with large .ini files. - -JavaScript Clients: - - * Added tests for couch.js and jquery.couch.js - * Added changes handler to jquery.couch.js. - * Added cache busting to jquery.couch.js if the user agent is msie. - * Added support for multi-document-fetch (via _all_docs) to jquery.couch.js. - * Added attachment versioning to jquery.couch.js. 
- * Added option to control ensure_full_commit to jquery.couch.js. - * Added list functionality to jquery.couch.js. - * Fixed issues where bulkSave() wasn't sending a POST body. - -View Server: - - * Provide a UUID to update functions (and all other functions) that they can - use to create new docs. - * Upgrade CommonJS modules support to 1.1.1. - * Fixed erlang filter funs and normalize filter fun API. - * Fixed hang in view shutdown. - -Log System: - - * Log HEAD requests as HEAD, not GET. - * Keep massive JSON blobs out of the error log. - * Fixed a timeout issue. - -Replication System: - - * Refactored various internal APIs related to attachment streaming. - * Fixed hanging replication. - * Fixed keepalive issue. - -URL Rewriter & Vhosts: - - * Allow more complex keys in rewriter. - * Allow global rewrites so system defaults are available in vhosts. - * Allow isolation of databases with vhosts. - * Fix issue with passing variables to query parameters. - -Test Suite: - - * Made the test suite overall more reliable. - -Version 0.11.0 --------------- - -Security: - - * Fixed CVE-2010-0009: Apache CouchDB Timing Attack Vulnerability. - * Added default cookie-authentication and users database. - * Added Futon user interface for user signup and login. - * Added per-database reader access control lists. - * Added per-database security object for configuration data in validation - functions. - * Added proxy authentication handler - -HTTP Interface: - - * Provide Content-MD5 header support for attachments. - * Added URL Rewriter handler. - * Added virtual host handling. - -View Server: - - * Added optional 'raw' binary collation for faster view builds where Unicode - collation is not important. - * Improved view index build time by reducing ICU collation callouts. - * Improved view information objects. - * Bug fix for partial updates during view builds. - * Move query server to a design-doc based protocol. 
- * Use json2.js for JSON serialization for compatiblity with native JSON. - * Major refactoring of couchjs to lay the groundwork for disabling cURL - support. The new HTTP interaction acts like a synchronous XHR. Example usage - of the new system is in the JavaScript CLI test runner. - -Replication: - - * Added option to implicitly create replication target databases. - * Avoid leaking file descriptors on automatic replication restarts. - * Added option to replicate a list of documents by id. - * Allow continuous replication to be cancelled. - -Storage System: - - * Adds batching of multiple updating requests, to improve throughput with many - writers. Removed the now redundant couch_batch_save module. - * Adds configurable compression of attachments. - -Runtime Statistics: - - * Statistics are now calculated for a moving window instead of non-overlapping - timeframes. - * Fixed a problem with statistics timers and system sleep. - * Moved statistic names to a term file in the priv directory. - -Futon: - - * Added a button for view compaction. - * JSON strings are now displayed as-is in the document view, without the escaping of - new-lines and quotes. That dramatically improves readability of multi-line - strings. - * Same goes for editing of JSON string values. When a change to a field value is - submitted, and the value is not valid JSON it is assumed to be a string. This - improves editing of multi-line strings a lot. - * Hitting tab in textareas no longer moves focus to the next form field, but simply - inserts a tab character at the current caret position. - * Fixed some font declarations. - -Build and System Integration: - - * Updated and improved source documentation. - * Fixed distribution preparation for building on Mac OS X. - * Added support for building a Windows installer as part of 'make dist'. - * Bug fix for building couch.app's module list. - * ETap tests are now run during make distcheck. 
This included a number of - updates to the build system to properly support VPATH builds. - * Gavin McDonald setup a build-bot instance. More info can be found at - http://ci.apache.org/buildbot.html - -Version 0.10.1 --------------- - -Replicator: - - * Stability enhancements regarding redirects, timeouts, OAuth. - -Query Server: - - * Avoid process leaks - * Allow list and view to span languages - -Stats: - - * Eliminate new process flood on system wake - -Build and System Integration: - - * Test suite now works with the distcheck target. - -Version 0.10.0 --------------- - -Storage Format: - - * Add move headers with checksums to the end of database files for extra robust - storage and faster storage. - -View Server: - - * Added native Erlang views for high-performance applications. - -HTTP Interface: - - * Added optional cookie-based authentication handler. - * Added optional two-legged OAuth authentication handler. - -Build and System Integration: - - * Changed `couchdb` script configuration options. - * Added default.d and local.d configuration directories to load sequence. - - -Version 0.9.2 -------------- - -Replication: - - * Fix replication with 0.10 servers initiated by an 0.9 server (COUCHDB-559). - -Build and System Integration: - - * Remove branch callbacks to allow building couchjs against newer versions of - Spidermonkey. - -Version 0.9.1 -------------- - -Build and System Integration: - - * PID file directory is now created by the SysV/BSD daemon scripts. - * Fixed the environment variables shown by the configure script. - * Fixed the build instructions shown by the configure script. - * Updated ownership and permission advice in `README` for better security. - -Configuration and stats system: - - * Corrected missing configuration file error message. - * Fixed incorrect recording of request time. - -Database Core: - - * Document validation for underscore prefixed variables. - * Made attachment storage less sparse. 
- * Fixed problems when a database with delayed commits pending is considered - idle, and subject to losing changes when shutdown. (COUCHDB-334) - -External Handlers: - - * Fix POST requests. - -Futon: - - * Redirect when loading a deleted view URI from the cookie. - -HTTP Interface: - - * Attachment requests respect the "rev" query-string parameter. - -JavaScript View Server: - - * Useful JavaScript Error messages. - -Replication: - - * Added support for Unicode characters transmitted as UTF-16 surrogate pairs. - * URL-encode attachment names when necessary. - * Pull specific revisions of an attachment, instead of just the latest one. - * Work around a rare chunk-merging problem in ibrowse. - * Work with documents containing Unicode characters outside the Basic - Multilingual Plane. - -Version 0.9.0 -------------- - -Futon Utility Client: - - * Added pagination to the database listing page. - * Implemented attachment uploading from the document page. - * Added page that shows the current configuration, and allows modification of - option values. - * Added a JSON "source view" for document display. - * JSON data in view rows is now syntax highlighted. - * Removed the use of an iframe for better integration with browser history and - bookmarking. - * Full database listing in the sidebar has been replaced by a short list of - recent databases. - * The view editor now allows selection of the view language if there is more - than one configured. - * Added links to go to the raw view or document URI. - * Added status page to display currently running tasks in CouchDB. - * JavaScript test suite split into multiple files. - * Pagination for reduce views. - -Design Document Resource Paths: - - * Added httpd_design_handlers config section. - * Moved _view to httpd_design_handlers. - * Added ability to render documents as non-JSON content-types with _show and - _list functions, which are also httpd_design_handlers. 
- -HTTP Interface: - - * Added client side UUIDs for idempotent document creation - * HTTP COPY for documents - * Streaming of chunked attachment PUTs to disk - * Remove negative count feature - * Add include_docs option for view queries - * Add multi-key view post for views - * Query parameter validation - * Use stale=ok to request potentially cached view index - * External query handler module for full-text or other indexers. - * Etags for attachments, views, shows and lists - * Show and list functions for rendering documents and views as developer - controlled content-types. - * Attachment names may use slashes to allow uploading of nested directories - (useful for static web hosting). - * Option for a view to run over design documents. - * Added newline to JSON responses. Closes bike-shed. - -Replication: - - * Using ibrowse. - * Checkpoint replications so failures are less expensive. - * Automatically retry of failed replications. - * Stream attachments in pull-replication. - -Database Core: - - * Faster B-tree implementation. - * Changed internal JSON term format. - * Improvements to Erlang VM interactions under heavy load. - * User context and administrator role. - * Update validations with design document validation functions. - * Document purge functionality. - * Ref-counting for database file handles. - -Build and System Integration: - - * The `couchdb` script now supports system chainable configuration files. - * The Mac OS X daemon script now redirects STDOUT and STDERR like SysV/BSD. - * The build and system integration have been improved for portability. - * Added COUCHDB_OPTIONS to etc/default/couchdb file. - * Remove COUCHDB_INI_FILE and COUCHDB_PID_FILE from etc/default/couchdb file. - * Updated `configure.ac` to manually link `libm` for portability. - * Updated `configure.ac` to extended default library paths. - * Removed inets configuration files. - * Added command line test runner. - * Created dev target for make. 
- -Configuration and stats system: - - * Separate default and local configuration files. - * HTTP interface for configuration changes. - * Statistics framework with HTTP query API. - -Version 0.8.1-incubating ------------------------- - -Database Core: - - * Fix for replication problems where the write queues can get backed up if the - writes aren't happening fast enough to keep up with the reads. For a large - replication, this can exhaust memory and crash, or slow down the machine - dramatically. The fix keeps only one document in the write queue at a time. - * Fix for databases sometimes incorrectly reporting that they contain 0 - documents after compaction. - * CouchDB now uses ibrowse instead of inets for its internal HTTP client - implementation. This means better replication stability. - -HTTP Interface: - - * Fix for chunked responses where chunks were always being split into multiple - TCP packets, which caused problems with the test suite under Safari, and in - some other cases. - * Fix for an invalid JSON response body being returned for some kinds of - views. (COUCHDB-84) - * Fix for connections not getting closed after rejecting a chunked request. - (COUCHDB-55) - * CouchDB can now be bound to IPv6 addresses. - * The HTTP `Server` header now contains the versions of CouchDB and Erlang. - -JavaScript View Server: - - * Sealing of documents has been disabled due to an incompatibility with - SpiderMonkey 1.9. - * Improve error handling for undefined values emitted by map functions. - (COUCHDB-83) - -Build and System Integration: - - * The `couchdb` script no longer uses `awk` for configuration checks as this - was causing portability problems. - * Updated `sudo` example in `README` to use the `-i` option, this fixes - problems when invoking from a directory the `couchdb` user cannot access. - -Futon: - - * The view selector dropdown should now work in Opera and Internet Explorer - even when it includes optgroups for design documents. 
(COUCHDB-81) - -Version 0.8.0-incubating ------------------------- - -Database Core: - - * The view engine has been completely decoupled from the storage engine. Index - data is now stored in separate files, and the format of the main database - file has changed. - * Databases can now be compacted to reclaim space used for deleted documents - and old document revisions. - * Support for incremental map/reduce views has been added. - * To support map/reduce, the structure of design documents has changed. View - values are now JSON objects containing at least a `map` member, and - optionally a `reduce` member. - * View servers are now identified by name (for example `javascript`) instead of - by media type. - * Automatically generated document IDs are now based on proper UUID generation - using the crypto module. - * The field `content-type` in the JSON representation of attachments has been - renamed to `content_type` (underscore). - -HTTP Interface: - - * CouchDB now uses MochiWeb instead of inets for the HTTP server - implementation. Among other things, this means that the extra configuration - files needed for inets (such as `couch_httpd.conf`) are no longer used. - * The HTTP interface now completely supports the `HEAD` method. (COUCHDB-3) - * Improved compliance of `Etag` handling with the HTTP specification. - (COUCHDB-13) - * Etags are no longer included in responses to document `GET` requests that - include query string parameters causing the JSON response to change without - the revision or the URI having changed. - * The bulk document update API has changed slightly on both the request and the - response side. In addition, bulk updates are now atomic. - * CouchDB now uses `TCP_NODELAY` to fix performance problems with persistent - connections on some platforms due to nagling. - * Including a `?descending=false` query string parameter in requests to views - no longer raises an error. 
- * Requests to unknown top-level reserved URLs (anything with a leading - underscore) now return a `unknown_private_path` error instead of the - confusing `illegal_database_name`. - * The Temporary view handling now expects a JSON request body, where the JSON - is an object with at least a `map` member, and optional `reduce` and - `language` members. - * Temporary views no longer determine the view server based on the Content-Type - header of the `POST` request, but rather by looking for a `language` member - in the JSON body of the request. - * The status code of responses to `DELETE` requests is now 200 to reflect that - that the deletion is performed synchronously. - -JavaScript View Server: - - * SpiderMonkey is no longer included with CouchDB, but rather treated as a - normal external dependency. A simple C program (`_couchjs`) is provided that - links against an existing SpiderMonkey installation and uses the interpreter - embedding API. - * View functions using the default JavaScript view server can now do logging - using the global `log(message)` function. Log messages are directed into the - CouchDB log at `INFO` level. (COUCHDB-59) - * The global `map(key, value)` function made available to view code has been - renamed to `emit(key, value)`. - * Fixed handling of exceptions raised by view functions. - -Build and System Integration: - - * CouchDB can automatically respawn following a server crash. - * Database server no longer refuses to start with a stale PID file. - * System logrotate configuration provided. - * Improved handling of ICU shared libraries. - * The `couchdb` script now automatically enables SMP support in Erlang. - * The `couchdb` and `couchjs` scripts have been improved for portability. - * The build and system integration have been improved for portability. - -Futon: - - * When adding a field to a document, Futon now just adds a field with an - autogenerated name instead of prompting for the name with a dialog. 
The name - is automatically put into edit mode so that it can be changed immediately. - * Fields are now sorted alphabetically by name when a document is displayed. - * Futon can be used to create and update permanent views. - * The maximum number of rows to display per page on the database page can now - be adjusted. - * Futon now uses the XMLHTTPRequest API asynchronously to communicate with the - CouchDB HTTP server, so that most operations no longer block the browser. - * View results sorting can now be switched between ascending and descending by - clicking on the `Key` column header. - * Fixed a bug where documents that contained a `@` character could not be - viewed. (COUCHDB-12) - * The database page now provides a `Compact` button to trigger database - compaction. (COUCHDB-38) - * Fixed portential double encoding of document IDs and other URI segments in - many instances. (COUCHDB-39) - * Improved display of attachments. - * The JavaScript Shell has been removed due to unresolved licensing issues. diff -Nru couchdb-1.2.0/config.h.in couchdb-1.4.0~rc.1/config.h.in --- couchdb-1.2.0/config.h.in 2012-03-29 17:05:37.000000000 -0400 +++ couchdb-1.4.0~rc.1/config.h.in 2013-08-23 10:57:39.000000000 -0400 @@ -1,5 +1,8 @@ /* config.h.in. Generated from configure.ac by autoheader. */ +/* Define if building universal (internal helper macro) */ +#undef AC_APPLE_UNIVERSAL_BUILD + /* "CouchJS executable name." */ #undef COUCHJS_NAME @@ -76,6 +79,9 @@ /* Define to the one symbol short name of this package. */ #undef PACKAGE_TARNAME +/* Define to the home page for this package. */ +#undef PACKAGE_URL + /* Define to the version of this package. */ #undef PACKAGE_VERSION @@ -88,14 +94,49 @@ /* Define to 1 if you have the ANSI C header files. */ #undef STDC_HEADERS -/* Version number of package */ -#undef VERSION - -/* Define to 1 if your processor stores words with the most significant byte - first (like Motorola and SPARC, unlike Intel and VAX). 
*/ -#undef WORDS_BIGENDIAN - +/* Enable extensions on AIX 3, Interix. */ +#ifndef _ALL_SOURCE +# undef _ALL_SOURCE +#endif /* Enable GNU extensions on systems that have them. */ #ifndef _GNU_SOURCE # undef _GNU_SOURCE #endif +/* Enable threading extensions on Solaris. */ +#ifndef _POSIX_PTHREAD_SEMANTICS +# undef _POSIX_PTHREAD_SEMANTICS +#endif +/* Enable extensions on HP NonStop. */ +#ifndef _TANDEM_SOURCE +# undef _TANDEM_SOURCE +#endif +/* Enable general extensions on Solaris. */ +#ifndef __EXTENSIONS__ +# undef __EXTENSIONS__ +#endif + + +/* Version number of package */ +#undef VERSION + +/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most + significant byte first (like Motorola and SPARC, unlike Intel). */ +#if defined AC_APPLE_UNIVERSAL_BUILD +# if defined __BIG_ENDIAN__ +# define WORDS_BIGENDIAN 1 +# endif +#else +# ifndef WORDS_BIGENDIAN +# undef WORDS_BIGENDIAN +# endif +#endif + +/* Define to 1 if on MINIX. */ +#undef _MINIX + +/* Define to 2 if the system does not provide POSIX.1 features except with + this defined. */ +#undef _POSIX_1_SOURCE + +/* Define to 1 if you need to in order for `stat' and other things to work. */ +#undef _POSIX_SOURCE diff -Nru couchdb-1.2.0/configure couchdb-1.4.0~rc.1/configure --- couchdb-1.2.0/configure 2012-03-29 17:05:41.000000000 -0400 +++ couchdb-1.4.0~rc.1/configure 2013-08-23 10:57:45.000000000 -0400 @@ -1,63 +1,84 @@ #! /bin/sh -# From configure.ac 1.2.0. +# From configure.ac 1.4.0. # Guess values for system-dependent variables and create Makefiles. -# Generated by GNU Autoconf 2.61 for Apache CouchDB 1.2.0. +# Generated by GNU Autoconf 2.69 for Apache CouchDB 1.4.0. # # Report bugs to . # -# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, -# 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc. +# +# Copyright (C) 1992-1996, 1998-2012 Free Software Foundation, Inc. 
+# +# # This configure script is free software; the Free Software Foundation # gives unlimited permission to copy, distribute and modify it. -## --------------------- ## -## M4sh Initialization. ## -## --------------------- ## +## -------------------- ## +## M4sh Initialization. ## +## -------------------- ## # Be more Bourne compatible DUALCASE=1; export DUALCASE # for MKS sh -if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then +if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : emulate sh NULLCMD=: - # Zsh 3.x and 4.x performs word splitting on ${1+"$@"}, which + # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which # is contrary to our usage. Disable this feature. alias -g '${1+"$@"}'='"$@"' setopt NO_GLOB_SUBST else - case `(set -o) 2>/dev/null` in - *posix*) set -o posix ;; + case `(set -o) 2>/dev/null` in #( + *posix*) : + set -o posix ;; #( + *) : + ;; esac - fi - - -# PATH needs CR -# Avoid depending upon Character Ranges. -as_cr_letters='abcdefghijklmnopqrstuvwxyz' -as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' -as_cr_Letters=$as_cr_letters$as_cr_LETTERS -as_cr_digits='0123456789' -as_cr_alnum=$as_cr_Letters$as_cr_digits - -# The user is always right. -if test "${PATH_SEPARATOR+set}" != set; then - echo "#! /bin/sh" >conf$$.sh - echo "exit 0" >>conf$$.sh - chmod +x conf$$.sh - if (PATH="/nonexistent;."; conf$$.sh) >/dev/null 2>&1; then - PATH_SEPARATOR=';' +as_nl=' +' +export as_nl +# Printing a long string crashes Solaris 7 /usr/bin/printf. +as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' +as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo +as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo +# Prefer a ksh shell builtin over an external printf program on Solaris, +# but without wasting forks for bash or zsh. 
+if test -z "$BASH_VERSION$ZSH_VERSION" \ + && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then + as_echo='print -r --' + as_echo_n='print -rn --' +elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then + as_echo='printf %s\n' + as_echo_n='printf %s' +else + if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then + as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' + as_echo_n='/usr/ucb/echo -n' else - PATH_SEPARATOR=: + as_echo_body='eval expr "X$1" : "X\\(.*\\)"' + as_echo_n_body='eval + arg=$1; + case $arg in #( + *"$as_nl"*) + expr "X$arg" : "X\\(.*\\)$as_nl"; + arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; + esac; + expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" + ' + export as_echo_n_body + as_echo_n='sh -c $as_echo_n_body as_echo' fi - rm -f conf$$.sh + export as_echo_body + as_echo='sh -c $as_echo_body as_echo' fi -# Support unset when possible. -if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then - as_unset=unset -else - as_unset=false +# The user is always right. +if test "${PATH_SEPARATOR+set}" != set; then + PATH_SEPARATOR=: + (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { + (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || + PATH_SEPARATOR=';' + } fi @@ -66,20 +87,19 @@ # there to prevent editors from complaining about space-tab. # (If _AS_PATH_WALK were called with IFS unset, it would disable word # splitting by setting IFS to empty value.) -as_nl=' -' IFS=" "" $as_nl" # Find who we are. Look in the path if we contain no directory separator. -case $0 in +as_myself= +case $0 in #(( *[\\/]* ) as_myself=$0 ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. - test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break -done + test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break + done IFS=$as_save_IFS ;; @@ -90,32 +110,324 @@ as_myself=$0 fi if test ! 
-f "$as_myself"; then - echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 - { (exit 1); exit 1; } + $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 + exit 1 fi -# Work around bugs in pre-3.0 UWIN ksh. -for as_var in ENV MAIL MAILPATH -do ($as_unset $as_var) >/dev/null 2>&1 && $as_unset $as_var +# Unset variables that we do not need and which cause bugs (e.g. in +# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" +# suppresses any "Segmentation fault" message there. '((' could +# trigger a bug in pdksh 5.2.14. +for as_var in BASH_ENV ENV MAIL MAILPATH +do eval test x\${$as_var+set} = xset \ + && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : done PS1='$ ' PS2='> ' PS4='+ ' # NLS nuisances. -for as_var in \ - LANG LANGUAGE LC_ADDRESS LC_ALL LC_COLLATE LC_CTYPE LC_IDENTIFICATION \ - LC_MEASUREMENT LC_MESSAGES LC_MONETARY LC_NAME LC_NUMERIC LC_PAPER \ - LC_TELEPHONE LC_TIME +LC_ALL=C +export LC_ALL +LANGUAGE=C +export LANGUAGE + +# CDPATH. +(unset CDPATH) >/dev/null 2>&1 && unset CDPATH + +# Use a proper internal environment variable to ensure we don't fall + # into an infinite loop, continuously re-executing ourselves. + if test x"${_as_can_reexec}" != xno && test "x$CONFIG_SHELL" != x; then + _as_can_reexec=no; export _as_can_reexec; + # We cannot yet assume a decent shell, so we have to provide a +# neutralization value for shells without unset; and this also +# works around shells that cannot unset nonexistent variables. +# Preserve -v and -x to the replacement shell. +BASH_ENV=/dev/null +ENV=/dev/null +(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV +case $- in # (((( + *v*x* | *x*v* ) as_opts=-vx ;; + *v* ) as_opts=-v ;; + *x* ) as_opts=-x ;; + * ) as_opts= ;; +esac +exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} +# Admittedly, this is quite paranoid, since all the known shells bail +# out after a failed `exec'. 
+$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 +as_fn_exit 255 + fi + # We don't want this to propagate to other subprocesses. + { _as_can_reexec=; unset _as_can_reexec;} +if test "x$CONFIG_SHELL" = x; then + as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then : + emulate sh + NULLCMD=: + # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which + # is contrary to our usage. Disable this feature. + alias -g '\${1+\"\$@\"}'='\"\$@\"' + setopt NO_GLOB_SUBST +else + case \`(set -o) 2>/dev/null\` in #( + *posix*) : + set -o posix ;; #( + *) : + ;; +esac +fi +" + as_required="as_fn_return () { (exit \$1); } +as_fn_success () { as_fn_return 0; } +as_fn_failure () { as_fn_return 1; } +as_fn_ret_success () { return 0; } +as_fn_ret_failure () { return 1; } + +exitcode=0 +as_fn_success || { exitcode=1; echo as_fn_success failed.; } +as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; } +as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; } +as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; } +if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then : + +else + exitcode=1; echo positional parameters were not saved. 
+fi +test x\$exitcode = x0 || exit 1 +test -x / || exit 1" + as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO + as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO + eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" && + test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1 +test \$(( 1 + 1 )) = 2 || exit 1 + + test -n \"\${ZSH_VERSION+set}\${BASH_VERSION+set}\" || ( + ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' + ECHO=\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO + ECHO=\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO + PATH=/empty FPATH=/empty; export PATH FPATH + test \"X\`printf %s \$ECHO\`\" = \"X\$ECHO\" \\ + || test \"X\`print -r -- \$ECHO\`\" = \"X\$ECHO\" ) || exit 1" + if (eval "$as_required") 2>/dev/null; then : + as_have_required=yes +else + as_have_required=no +fi + if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then : + +else + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +as_found=false +for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH do - if (set +x; test -z "`(eval $as_var=C; export $as_var) 2>&1`"); then - eval $as_var=C; export $as_var + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + as_found=: + case $as_dir in #( + /*) + for as_base in sh bash ksh sh5; do + # Try only shells that exist, to save several forks. 
+ as_shell=$as_dir/$as_base + if { test -f "$as_shell" || test -f "$as_shell.exe"; } && + { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then : + CONFIG_SHELL=$as_shell as_have_required=yes + if { $as_echo "$as_bourne_compatible""$as_suggested" | as_run=a "$as_shell"; } 2>/dev/null; then : + break 2 +fi +fi + done;; + esac + as_found=false +done +$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } && + { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then : + CONFIG_SHELL=$SHELL as_have_required=yes +fi; } +IFS=$as_save_IFS + + + if test "x$CONFIG_SHELL" != x; then : + export CONFIG_SHELL + # We cannot yet assume a decent shell, so we have to provide a +# neutralization value for shells without unset; and this also +# works around shells that cannot unset nonexistent variables. +# Preserve -v and -x to the replacement shell. +BASH_ENV=/dev/null +ENV=/dev/null +(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV +case $- in # (((( + *v*x* | *x*v* ) as_opts=-vx ;; + *v* ) as_opts=-v ;; + *x* ) as_opts=-x ;; + * ) as_opts= ;; +esac +exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} +# Admittedly, this is quite paranoid, since all the known shells bail +# out after a failed `exec'. +$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 +exit 255 +fi + + if test x$as_have_required = xno; then : + $as_echo "$0: This script requires a shell more modern than all" + $as_echo "$0: the shells that I found on your system." + if test x${ZSH_VERSION+set} = xset ; then + $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should" + $as_echo "$0: be upgraded to zsh 4.3.4 or later." else - ($as_unset $as_var) >/dev/null 2>&1 && $as_unset $as_var + $as_echo "$0: Please tell bug-autoconf@gnu.org and https://issues.apache.org/jira/browse/COUCHDB +$0: about your system, including any error possibly output +$0: before this message. 
Then install a modern shell, or +$0: manually run the script under such a shell if you do +$0: have one." fi -done + exit 1 +fi +fi +fi +SHELL=${CONFIG_SHELL-/bin/sh} +export SHELL +# Unset more variables known to interfere with behavior of common tools. +CLICOLOR_FORCE= GREP_OPTIONS= +unset CLICOLOR_FORCE GREP_OPTIONS + +## --------------------- ## +## M4sh Shell Functions. ## +## --------------------- ## +# as_fn_unset VAR +# --------------- +# Portably unset VAR. +as_fn_unset () +{ + { eval $1=; unset $1;} +} +as_unset=as_fn_unset + +# as_fn_set_status STATUS +# ----------------------- +# Set $? to STATUS, without forking. +as_fn_set_status () +{ + return $1 +} # as_fn_set_status + +# as_fn_exit STATUS +# ----------------- +# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. +as_fn_exit () +{ + set +e + as_fn_set_status $1 + exit $1 +} # as_fn_exit + +# as_fn_mkdir_p +# ------------- +# Create "$as_dir" as a directory, including parents if necessary. +as_fn_mkdir_p () +{ + + case $as_dir in #( + -*) as_dir=./$as_dir;; + esac + test -d "$as_dir" || eval $as_mkdir_p || { + as_dirs= + while :; do + case $as_dir in #( + *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( + *) as_qdir=$as_dir;; + esac + as_dirs="'$as_qdir' $as_dirs" + as_dir=`$as_dirname -- "$as_dir" || +$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$as_dir" : 'X\(//\)[^/]' \| \ + X"$as_dir" : 'X\(//\)$' \| \ + X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X"$as_dir" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + test -d "$as_dir" && break + done + test -z "$as_dirs" || eval "mkdir $as_dirs" + } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" + + +} # as_fn_mkdir_p + +# as_fn_executable_p FILE +# ----------------------- +# Test if FILE is an executable regular file. 
+as_fn_executable_p () +{ + test -f "$1" && test -x "$1" +} # as_fn_executable_p +# as_fn_append VAR VALUE +# ---------------------- +# Append the text in VALUE to the end of the definition contained in VAR. Take +# advantage of any shell optimizations that allow amortized linear growth over +# repeated appends, instead of the typical quadratic growth present in naive +# implementations. +if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : + eval 'as_fn_append () + { + eval $1+=\$2 + }' +else + as_fn_append () + { + eval $1=\$$1\$2 + } +fi # as_fn_append + +# as_fn_arith ARG... +# ------------------ +# Perform arithmetic evaluation on the ARGs, and store the result in the +# global $as_val. Take advantage of shells that can avoid forks. The arguments +# must be portable across $(()) and expr. +if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : + eval 'as_fn_arith () + { + as_val=$(( $* )) + }' +else + as_fn_arith () + { + as_val=`expr "$@" || test $? -eq 1` + } +fi # as_fn_arith + + +# as_fn_error STATUS ERROR [LINENO LOG_FD] +# ---------------------------------------- +# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are +# provided, also output the error to LOG_FD, referencing LINENO. Then exit the +# script with STATUS, using 1 if that was 0. +as_fn_error () +{ + as_status=$1; test $as_status -eq 0 && as_status=1 + if test "$4"; then + as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 + fi + $as_echo "$as_me: error: $2" >&2 + as_fn_exit $as_status +} # as_fn_error -# Required to use basename. if expr a : '\(a\)' >/dev/null 2>&1 && test "X`expr 00001 : '.*\(...\)'`" = X001; then as_expr=expr @@ -129,13 +441,17 @@ as_basename=false fi +if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then + as_dirname=dirname +else + as_dirname=false +fi -# Name of the executable. 
as_me=`$as_basename -- "$0" || $as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ X"$0" : 'X\(//\)$' \| \ X"$0" : 'X\(/\)' \| . 2>/dev/null || -echo X/"$0" | +$as_echo X/"$0" | sed '/^.*\/\([^/][^/]*\)\/*$/{ s//\1/ q @@ -150,456 +466,136 @@ } s/.*/./; q'` -# CDPATH. -$as_unset CDPATH +# Avoid depending upon Character Ranges. +as_cr_letters='abcdefghijklmnopqrstuvwxyz' +as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' +as_cr_Letters=$as_cr_letters$as_cr_LETTERS +as_cr_digits='0123456789' +as_cr_alnum=$as_cr_Letters$as_cr_digits -if test "x$CONFIG_SHELL" = x; then - if (eval ":") 2>/dev/null; then - as_have_required=yes -else - as_have_required=no -fi + as_lineno_1=$LINENO as_lineno_1a=$LINENO + as_lineno_2=$LINENO as_lineno_2a=$LINENO + eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" && + test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || { + # Blame Lee E. McMahon (1931-1989) for sed's syntax. :-) + sed -n ' + p + /[$]LINENO/= + ' <$as_myself | + sed ' + s/[$]LINENO.*/&-/ + t lineno + b + :lineno + N + :loop + s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/ + t loop + s/-\n.*// + ' >$as_me.lineno && + chmod +x "$as_me.lineno" || + { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; } - if test $as_have_required = yes && (eval ": -(as_func_return () { - (exit \$1) -} -as_func_success () { - as_func_return 0 -} -as_func_failure () { - as_func_return 1 -} -as_func_ret_success () { - return 0 -} -as_func_ret_failure () { - return 1 + # If we had to re-execute with $CONFIG_SHELL, we're ensured to have + # already done that, so ensure we don't try to do so again and fall + # in an infinite loop. This has already happened in practice. + _as_can_reexec=no; export _as_can_reexec + # Don't try to exec as it changes $[0], causing all sort of problems + # (the dirname of $[0] is not the place where we might find the + # original and so on. Autoconf is especially sensitive to this). + . 
"./$as_me.lineno" + # Exit status is that of the last command. + exit } -exitcode=0 -if as_func_success; then - : +ECHO_C= ECHO_N= ECHO_T= +case `echo -n x` in #((((( +-n*) + case `echo 'xy\c'` in + *c*) ECHO_T=' ';; # ECHO_T is single tab character. + xy) ECHO_C='\c';; + *) echo `echo ksh88 bug on AIX 6.1` > /dev/null + ECHO_T=' ';; + esac;; +*) + ECHO_N='-n';; +esac + +rm -f conf$$ conf$$.exe conf$$.file +if test -d conf$$.dir; then + rm -f conf$$.dir/conf$$.file else - exitcode=1 - echo as_func_success failed. + rm -f conf$$.dir + mkdir conf$$.dir 2>/dev/null fi - -if as_func_failure; then - exitcode=1 - echo as_func_failure succeeded. +if (echo >conf$$.file) 2>/dev/null; then + if ln -s conf$$.file conf$$ 2>/dev/null; then + as_ln_s='ln -s' + # ... but there are two gotchas: + # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. + # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. + # In both cases, we have to default to `cp -pR'. + ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || + as_ln_s='cp -pR' + elif ln conf$$.file conf$$ 2>/dev/null; then + as_ln_s=ln + else + as_ln_s='cp -pR' + fi +else + as_ln_s='cp -pR' fi +rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file +rmdir conf$$.dir 2>/dev/null -if as_func_ret_success; then - : +if mkdir -p . 2>/dev/null; then + as_mkdir_p='mkdir -p "$as_dir"' else - exitcode=1 - echo as_func_ret_success failed. + test -d ./-p && rmdir ./-p + as_mkdir_p=false fi -if as_func_ret_failure; then - exitcode=1 - echo as_func_ret_failure succeeded. -fi +as_test_x='test -x' +as_executable_p=as_fn_executable_p -if ( set x; as_func_ret_success y && test x = \"\$1\" ); then - : -else - exitcode=1 - echo positional parameters were not saved. -fi +# Sed expression to map a string onto a valid CPP name. 
+as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" -test \$exitcode = 0) || { (exit 1); exit 1; } +# Sed expression to map a string onto a valid variable name. +as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" -( - as_lineno_1=\$LINENO - as_lineno_2=\$LINENO - test \"x\$as_lineno_1\" != \"x\$as_lineno_2\" && - test \"x\`expr \$as_lineno_1 + 1\`\" = \"x\$as_lineno_2\") || { (exit 1); exit 1; } +SHELL=${CONFIG_SHELL-/bin/sh} -( - test -n \"\${ZSH_VERSION+set}\${BASH_VERSION+set}\" || ( - ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' - ECHO=\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO - ECHO=\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO - PATH=/empty FPATH=/empty; export PATH FPATH - test \"X\`printf %s \$ECHO\`\" = \"X\$ECHO\" \\ - || test \"X\`print -r -- \$ECHO\`\" = \"X\$ECHO\" )) || { (exit 1); exit 1; } -") 2> /dev/null; then - : -else - as_candidate_shells= - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - case $as_dir in - /*) - for as_base in sh bash ksh sh5; do - as_candidate_shells="$as_candidate_shells $as_dir/$as_base" - done;; - esac -done -IFS=$as_save_IFS +test -n "$DJDIR" || exec 7<&0 &1 - for as_shell in $as_candidate_shells $SHELL; do - # Try only shells that exist, to save several forks. - if { test -f "$as_shell" || test -f "$as_shell.exe"; } && - { ("$as_shell") 2> /dev/null <<\_ASEOF -if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then - emulate sh - NULLCMD=: - # Zsh 3.x and 4.x performs word splitting on ${1+"$@"}, which - # is contrary to our usage. Disable this feature. - alias -g '${1+"$@"}'='"$@"' - setopt NO_GLOB_SUBST -else - case `(set -o) 2>/dev/null` in - *posix*) set -o posix ;; -esac +# Name of the host. 
+# hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status, +# so uname gets run too. +ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q` -fi +# +# Initializations. +# +ac_default_prefix=/usr/local +ac_clean_files= +ac_config_libobj_dir=. +LIBOBJS= +cross_compiling=no +subdirs= +MFLAGS= +MAKEFLAGS= +# Identity of this package. +PACKAGE_NAME='Apache CouchDB' +PACKAGE_TARNAME='apache-couchdb' +PACKAGE_VERSION='1.4.0' +PACKAGE_STRING='Apache CouchDB 1.4.0' +PACKAGE_BUGREPORT='https://issues.apache.org/jira/browse/COUCHDB' +PACKAGE_URL='' -: -_ASEOF -}; then - CONFIG_SHELL=$as_shell - as_have_required=yes - if { "$as_shell" 2> /dev/null <<\_ASEOF -if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then - emulate sh - NULLCMD=: - # Zsh 3.x and 4.x performs word splitting on ${1+"$@"}, which - # is contrary to our usage. Disable this feature. - alias -g '${1+"$@"}'='"$@"' - setopt NO_GLOB_SUBST -else - case `(set -o) 2>/dev/null` in - *posix*) set -o posix ;; -esac - -fi - - -: -(as_func_return () { - (exit $1) -} -as_func_success () { - as_func_return 0 -} -as_func_failure () { - as_func_return 1 -} -as_func_ret_success () { - return 0 -} -as_func_ret_failure () { - return 1 -} - -exitcode=0 -if as_func_success; then - : -else - exitcode=1 - echo as_func_success failed. -fi - -if as_func_failure; then - exitcode=1 - echo as_func_failure succeeded. -fi - -if as_func_ret_success; then - : -else - exitcode=1 - echo as_func_ret_success failed. -fi - -if as_func_ret_failure; then - exitcode=1 - echo as_func_ret_failure succeeded. -fi - -if ( set x; as_func_ret_success y && test x = "$1" ); then - : -else - exitcode=1 - echo positional parameters were not saved. 
-fi - -test $exitcode = 0) || { (exit 1); exit 1; } - -( - as_lineno_1=$LINENO - as_lineno_2=$LINENO - test "x$as_lineno_1" != "x$as_lineno_2" && - test "x`expr $as_lineno_1 + 1`" = "x$as_lineno_2") || { (exit 1); exit 1; } - -( - test -n "${ZSH_VERSION+set}${BASH_VERSION+set}" || ( - ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' - ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO - ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO - PATH=/empty FPATH=/empty; export PATH FPATH - test "X`printf %s $ECHO`" = "X$ECHO" \ - || test "X`print -r -- $ECHO`" = "X$ECHO" )) || { (exit 1); exit 1; } - -_ASEOF -}; then - break -fi - -fi - - done - - if test "x$CONFIG_SHELL" != x; then - for as_var in BASH_ENV ENV - do ($as_unset $as_var) >/dev/null 2>&1 && $as_unset $as_var - done - export CONFIG_SHELL - exec "$CONFIG_SHELL" "$as_myself" ${1+"$@"} -fi - - - if test $as_have_required = no; then - echo This script requires a shell more modern than all the - echo shells that I found on your system. Please install a - echo modern shell, or manually run the script under such a - echo shell if you do have one. - { (exit 1); exit 1; } -fi - - -fi - -fi - - - -(eval "as_func_return () { - (exit \$1) -} -as_func_success () { - as_func_return 0 -} -as_func_failure () { - as_func_return 1 -} -as_func_ret_success () { - return 0 -} -as_func_ret_failure () { - return 1 -} - -exitcode=0 -if as_func_success; then - : -else - exitcode=1 - echo as_func_success failed. -fi - -if as_func_failure; then - exitcode=1 - echo as_func_failure succeeded. -fi - -if as_func_ret_success; then - : -else - exitcode=1 - echo as_func_ret_success failed. -fi - -if as_func_ret_failure; then - exitcode=1 - echo as_func_ret_failure succeeded. -fi - -if ( set x; as_func_ret_success y && test x = \"\$1\" ); then - : -else - exitcode=1 - echo positional parameters were not saved. -fi - -test \$exitcode = 0") || { - echo No shell found that supports shell functions. 
- echo Please tell autoconf@gnu.org about your system, - echo including any error possibly output before this - echo message -} - - - - as_lineno_1=$LINENO - as_lineno_2=$LINENO - test "x$as_lineno_1" != "x$as_lineno_2" && - test "x`expr $as_lineno_1 + 1`" = "x$as_lineno_2" || { - - # Create $as_me.lineno as a copy of $as_myself, but with $LINENO - # uniformly replaced by the line number. The first 'sed' inserts a - # line-number line after each line using $LINENO; the second 'sed' - # does the real work. The second script uses 'N' to pair each - # line-number line with the line containing $LINENO, and appends - # trailing '-' during substitution so that $LINENO is not a special - # case at line end. - # (Raja R Harinath suggested sed '=', and Paul Eggert wrote the - # scripts with optimization help from Paolo Bonzini. Blame Lee - # E. McMahon (1931-1989) for sed's syntax. :-) - sed -n ' - p - /[$]LINENO/= - ' <$as_myself | - sed ' - s/[$]LINENO.*/&-/ - t lineno - b - :lineno - N - :loop - s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/ - t loop - s/-\n.*// - ' >$as_me.lineno && - chmod +x "$as_me.lineno" || - { echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2 - { (exit 1); exit 1; }; } - - # Don't try to exec as it changes $[0], causing all sort of problems - # (the dirname of $[0] is not the place where we might find the - # original and so on. Autoconf is especially sensitive to this). - . "./$as_me.lineno" - # Exit status is that of the last command. - exit -} - - -if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then - as_dirname=dirname -else - as_dirname=false -fi - -ECHO_C= ECHO_N= ECHO_T= -case `echo -n x` in --n*) - case `echo 'x\c'` in - *c*) ECHO_T=' ';; # ECHO_T is single tab character. 
- *) ECHO_C='\c';; - esac;; -*) - ECHO_N='-n';; -esac - -if expr a : '\(a\)' >/dev/null 2>&1 && - test "X`expr 00001 : '.*\(...\)'`" = X001; then - as_expr=expr -else - as_expr=false -fi - -rm -f conf$$ conf$$.exe conf$$.file -if test -d conf$$.dir; then - rm -f conf$$.dir/conf$$.file -else - rm -f conf$$.dir - mkdir conf$$.dir -fi -echo >conf$$.file -if ln -s conf$$.file conf$$ 2>/dev/null; then - as_ln_s='ln -s' - # ... but there are two gotchas: - # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. - # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. - # In both cases, we have to default to `cp -p'. - ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || - as_ln_s='cp -p' -elif ln conf$$.file conf$$ 2>/dev/null; then - as_ln_s=ln -else - as_ln_s='cp -p' -fi -rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file -rmdir conf$$.dir 2>/dev/null - -if mkdir -p . 2>/dev/null; then - as_mkdir_p=: -else - test -d ./-p && rmdir ./-p - as_mkdir_p=false -fi - -if test -x / >/dev/null 2>&1; then - as_test_x='test -x' -else - if ls -dL / >/dev/null 2>&1; then - as_ls_L_option=L - else - as_ls_L_option= - fi - as_test_x=' - eval sh -c '\'' - if test -d "$1"; then - test -d "$1/."; - else - case $1 in - -*)set "./$1";; - esac; - case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in - ???[sx]*):;;*)false;;esac;fi - '\'' sh - ' -fi -as_executable_p=$as_test_x - -# Sed expression to map a string onto a valid CPP name. -as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" - -# Sed expression to map a string onto a valid variable name. -as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" - - -SHELL=${CONFIG_SHELL-/bin/sh} - - -exec 7<&0 &1 - -# Name of the host. -# hostname on some systems (SVR3.2, Linux) returns a bogus exit status, -# so uname gets run too. -ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q` - -# -# Initializations. 
-# -ac_default_prefix=/usr/local -ac_clean_files= -ac_config_libobj_dir=. -LIBOBJS= -cross_compiling=no -subdirs= -MFLAGS= -MAKEFLAGS= -SHELL=${CONFIG_SHELL-/bin/sh} - -# Identity of this package. -PACKAGE_NAME='Apache CouchDB' -PACKAGE_TARNAME='apache-couchdb' -PACKAGE_VERSION='1.2.0' -PACKAGE_STRING='Apache CouchDB 1.2.0' -PACKAGE_BUGREPORT='https://issues.apache.org/jira/browse/COUCHDB' - -ac_unique_file="CHANGES" # Factoring default headers for most tests. ac_includes_default="\ #include @@ -636,225 +632,272 @@ # include #endif" -ac_subst_vars='SHELL -PATH_SEPARATOR -PACKAGE_NAME -PACKAGE_TARNAME -PACKAGE_VERSION -PACKAGE_STRING -PACKAGE_BUGREPORT -exec_prefix -prefix -program_transform_name -bindir -sbindir -libexecdir -datarootdir -datadir -sysconfdir -sharedstatedir -localstatedir -includedir -oldincludedir -docdir -infodir -htmldir -dvidir -pdfdir -psdir -libdir -localedir -mandir -DEFS -ECHO_C -ECHO_N -ECHO_T -LIBS -build_alias -host_alias -target_alias -INSTALL_PROGRAM -INSTALL_SCRIPT -INSTALL_DATA -am__isrc -CYGPATH_W -PACKAGE -VERSION -ACLOCAL -AUTOCONF -AUTOMAKE -AUTOHEADER -MAKEINFO -install_sh -STRIP -INSTALL_STRIP_PROGRAM -mkdir_p -AWK -SET_MAKE -am__leading_dot -AMTAR -am__tar -am__untar -CC -CFLAGS -LDFLAGS -CPPFLAGS -ac_ct_CC -EXEEXT -OBJEXT -DEPDIR -am__include -am__quote -AMDEP_TRUE -AMDEP_FALSE -AMDEPBACKSLASH -CCDEPMODE -am__fastdepCC_TRUE -am__fastdepCC_FALSE -AS -DLLTOOL -OBJDUMP -LIBTOOL -build -build_cpu -build_vendor -build_os -host -host_cpu -host_vendor -host_os +ac_subst_vars='am__EXEEXT_FALSE +am__EXEEXT_TRUE +LTLIBOBJS +LIBOBJS +abs_top_builddir +abs_top_srcdir +localerlanglibdir +locallibbindir +localstaterundir +localstatelogdir +localstatelibdir +locallibdir +localdocdir +localdatadir +localconfdir +bug_uri +version_release +version_stage +version_revision +version_minor +version_major +version +package_name +package_tarname +package_identifier +package_author_address +package_author_name +STRICTNESS_FALSE +STRICTNESS_TRUE 
+TESTS_FALSE +TESTS_TRUE +BUILD_HTML_FALSE +BUILD_HTML_TRUE +BUILD_PDF_FALSE +BUILD_PDF_TRUE +BUILD_INFO_FALSE +BUILD_INFO_TRUE +BUILD_MAN_FALSE +BUILD_MAN_TRUE +USE_CURL_FALSE +USE_CURL_TRUE +USE_NATIVE_MOCHIJSON_FALSE +USE_NATIVE_MOCHIJSON_TRUE +LAUNCHD_FALSE +LAUNCHD_TRUE +INIT_FALSE +INIT_TRUE +HAS_SPHINX_BUILD +HAS_INSTALLINFO +HAS_MAKEINFO +HAS_PDFLATEX +HAS_HELP2MAN +launchddir +initdir +ERLC +USE_EJSON_COMPARE_NIF_FALSE +USE_EJSON_COMPARE_NIF_TRUE +USE_OTP_NIFS_FALSE +USE_OTP_NIFS_TRUE +otp_release +CURL_LIBS +CURL_CFLAGS +CURL_CONFIG +ICU_BIN +ICU_LIBS +ICU_CXXFLAGS +ICU_CFLAGS +ICU_CPPFLAGS +ICU_CONFIG +msvc_redist_name +msvc_redist_dir +INNO_COMPILER_EXECUTABLE +openssl_bin_dir +JS_LIB_BINARY +WINDOWS_FALSE +WINDOWS_TRUE +FLAGS +ERLC_FLAGS +JS_LIBS +JS_CFLAGS +ERLANG_FLAGS +ERL +SNAPPY_PATCHLEVEL +SNAPPY_MINOR +SNAPPY_MAJOR +ac_cv_have_stddef_h +ac_cv_have_stdint_h +CXXCPP +am__fastdepCXX_FALSE +am__fastdepCXX_TRUE +CXXDEPMODE +ac_ct_CXX +CXXFLAGS +CXX +PKG_CONFIG_LIBDIR +PKG_CONFIG_PATH +PKG_CONFIG +OTOOL64 +OTOOL +LIPO +NMEDIT +DSYMUTIL +MANIFEST_TOOL +RANLIB +ac_ct_AR +AR +LN_S +NM +ac_ct_DUMPBIN +DUMPBIN +LD +FGREP SED -GREP +host_os +host_vendor +host_cpu +host +build_os +build_vendor +build_cpu +build +LIBTOOL +OBJDUMP +DLLTOOL +AS EGREP -FGREP -LD -DUMPBIN -ac_ct_DUMPBIN -NM -LN_S -AR -RANLIB -DSYMUTIL -NMEDIT -LIPO -OTOOL -OTOOL64 +GREP +CPP +am__fastdepCC_FALSE +am__fastdepCC_TRUE +CCDEPMODE +am__nodep +AMDEPBACKSLASH +AMDEP_FALSE +AMDEP_TRUE +am__quote +am__include +DEPDIR +OBJEXT +EXEEXT +ac_ct_CC +CPPFLAGS +LDFLAGS +CFLAGS +CC +am__untar +am__tar +AMTAR +am__leading_dot +SET_MAKE +AWK +mkdir_p +MKDIR_P +INSTALL_STRIP_PROGRAM +STRIP +install_sh +MAKEINFO +AUTOHEADER +AUTOMAKE +AUTOCONF +ACLOCAL +VERSION +PACKAGE +CYGPATH_W +am__isrc +INSTALL_DATA +INSTALL_SCRIPT +INSTALL_PROGRAM +target_alias +host_alias +build_alias +LIBS +ECHO_T +ECHO_N +ECHO_C +DEFS +mandir +localedir +libdir +psdir +pdfdir +dvidir +htmldir +infodir +docdir +oldincludedir 
+includedir +localstatedir +sharedstatedir +sysconfdir +datadir +datarootdir +libexecdir +sbindir +bindir +program_transform_name +prefix +exec_prefix +PACKAGE_URL +PACKAGE_BUGREPORT +PACKAGE_STRING +PACKAGE_VERSION +PACKAGE_TARNAME +PACKAGE_NAME +PATH_SEPARATOR +SHELL' +ac_subst_files='' +ac_user_opts=' +enable_option_checking +enable_dependency_tracking +enable_shared +enable_static +with_pic +enable_fast_install +with_gnu_ld +with_sysroot +enable_libtool_lock +with_erlang +with_js_include +with_js_lib +enable_js_trunk +with_openssl_bin_dir +with_msvc_redist_dir +with_win32_icu_binaries +with_win32_curl +with_curl_prefix +with_curl_exec_prefix +enable_init +enable_launchd +enable_native_mochijson +enable_tests +enable_docs +enable_strictness +' + ac_precious_vars='build_alias +host_alias +target_alias +CC +CFLAGS +LDFLAGS +LIBS +CPPFLAGS CPP PKG_CONFIG PKG_CONFIG_PATH PKG_CONFIG_LIBDIR CXX CXXFLAGS -ac_ct_CXX -CXXDEPMODE -am__fastdepCXX_TRUE -am__fastdepCXX_FALSE +CCC CXXCPP -ac_cv_have_stdint_h -ac_cv_have_stddef_h -SNAPPY_MAJOR -SNAPPY_MINOR -SNAPPY_PATCHLEVEL -ERLC_FLAGS -FLAGS -ERL -ERLANG_FLAGS -JS185_CFLAGS -JS185_LIBS JS_CFLAGS JS_LIBS -WINDOWS_TRUE -WINDOWS_FALSE -JS_LIB_BINARY -openssl_bin_dir -INNO_COMPILER_EXECUTABLE -msvc_redist_dir -msvc_redist_name -JS_LDFLAGS -ICU_CONFIG -ICU_CFLAGS -ICU_CXXFLAGS -ICU_LIBS -ICU_CPPFLAGS -ICU_BIN +ERLC_FLAGS +FLAGS CURL_CONFIG CURL_CFLAGS CURL_LIBS -otp_release -USE_OTP_NIFS_TRUE -USE_OTP_NIFS_FALSE -USE_EJSON_COMPARE_NIF_TRUE -USE_EJSON_COMPARE_NIF_FALSE -ERLC -HELP2MAN_EXECUTABLE -initdir -launchddir -INIT_TRUE -INIT_FALSE -LAUNCHD_TRUE -LAUNCHD_FALSE -HELP2MAN_TRUE -HELP2MAN_FALSE -USE_NATIVE_MOCHIJSON_TRUE -USE_NATIVE_MOCHIJSON_FALSE -USE_CURL_TRUE -USE_CURL_FALSE -package_author_name -package_author_address -package_identifier -package_tarname -package_name -version -version_major -version_minor -version_revision -version_stage -version_release -bug_uri -localconfdir -localdatadir -localdocdir -locallibdir 
-localstatelibdir -localstatelogdir -localstaterundir -locallibbindir -localerlanglibdir -abs_top_srcdir -abs_top_builddir -LIBOBJS -LTLIBOBJS' -ac_subst_files='' - ac_precious_vars='build_alias -host_alias -target_alias -CC -CFLAGS -LDFLAGS -LIBS -CPPFLAGS -CPP -PKG_CONFIG -PKG_CONFIG_PATH -PKG_CONFIG_LIBDIR -CXX -CXXFLAGS -CCC -CXXCPP -ERLC_FLAGS -FLAGS -JS185_CFLAGS -JS185_LIBS -JS_CFLAGS -JS_LIBS ERL -ERLC -HELP2MAN_EXECUTABLE' +ERLC' # Initialize some variables set by options. ac_init_help= ac_init_version=false +ac_unrecognized_opts= +ac_unrecognized_sep= # The variables have the same names as the options, with # dashes changed to underlines. cache_file=/dev/null @@ -910,8 +953,9 @@ fi case $ac_option in - *=*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;; - *) ac_optarg=yes ;; + *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;; + *=) ac_optarg= ;; + *) ac_optarg=yes ;; esac # Accept the important Cygnus configure options, so we can diagnose typos. @@ -930,989 +974,3278 @@ -build=* | --build=* | --buil=* | --bui=* | --bu=*) build_alias=$ac_optarg ;; - -cache-file | --cache-file | --cache-fil | --cache-fi \ - | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c) - ac_prev=cache_file ;; - -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \ - | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*) - cache_file=$ac_optarg ;; + -cache-file | --cache-file | --cache-fil | --cache-fi \ + | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c) + ac_prev=cache_file ;; + -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \ + | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*) + cache_file=$ac_optarg ;; + + --config-cache | -C) + cache_file=config.cache ;; + + -datadir | --datadir | --datadi | --datad) + ac_prev=datadir ;; + -datadir=* | --datadir=* | --datadi=* | --datad=*) + datadir=$ac_optarg ;; + + -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot 
\ + | --dataroo | --dataro | --datar) + ac_prev=datarootdir ;; + -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \ + | --dataroot=* | --dataroo=* | --dataro=* | --datar=*) + datarootdir=$ac_optarg ;; + + -disable-* | --disable-*) + ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? "invalid feature name: $ac_useropt" + ac_useropt_orig=$ac_useropt + ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"enable_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval enable_$ac_useropt=no ;; + + -docdir | --docdir | --docdi | --doc | --do) + ac_prev=docdir ;; + -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*) + docdir=$ac_optarg ;; + + -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv) + ac_prev=dvidir ;; + -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*) + dvidir=$ac_optarg ;; + + -enable-* | --enable-*) + ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? 
"invalid feature name: $ac_useropt" + ac_useropt_orig=$ac_useropt + ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"enable_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval enable_$ac_useropt=\$ac_optarg ;; + + -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \ + | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \ + | --exec | --exe | --ex) + ac_prev=exec_prefix ;; + -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \ + | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \ + | --exec=* | --exe=* | --ex=*) + exec_prefix=$ac_optarg ;; + + -gas | --gas | --ga | --g) + # Obsolete; use --with-gas. + with_gas=yes ;; + + -help | --help | --hel | --he | -h) + ac_init_help=long ;; + -help=r* | --help=r* | --hel=r* | --he=r* | -hr*) + ac_init_help=recursive ;; + -help=s* | --help=s* | --hel=s* | --he=s* | -hs*) + ac_init_help=short ;; + + -host | --host | --hos | --ho) + ac_prev=host_alias ;; + -host=* | --host=* | --hos=* | --ho=*) + host_alias=$ac_optarg ;; + + -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht) + ac_prev=htmldir ;; + -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \ + | --ht=*) + htmldir=$ac_optarg ;; + + -includedir | --includedir | --includedi | --included | --include \ + | --includ | --inclu | --incl | --inc) + ac_prev=includedir ;; + -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \ + | --includ=* | --inclu=* | --incl=* | --inc=*) + includedir=$ac_optarg ;; + + -infodir | --infodir | --infodi | --infod | --info | --inf) + ac_prev=infodir ;; + -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*) + infodir=$ac_optarg ;; + + -libdir | --libdir | --libdi | --libd) + ac_prev=libdir ;; + -libdir=* | --libdir=* | --libdi=* | --libd=*) + libdir=$ac_optarg ;; + + -libexecdir 
| --libexecdir | --libexecdi | --libexecd | --libexec \ + | --libexe | --libex | --libe) + ac_prev=libexecdir ;; + -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \ + | --libexe=* | --libex=* | --libe=*) + libexecdir=$ac_optarg ;; + + -localedir | --localedir | --localedi | --localed | --locale) + ac_prev=localedir ;; + -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*) + localedir=$ac_optarg ;; + + -localstatedir | --localstatedir | --localstatedi | --localstated \ + | --localstate | --localstat | --localsta | --localst | --locals) + ac_prev=localstatedir ;; + -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \ + | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*) + localstatedir=$ac_optarg ;; + + -mandir | --mandir | --mandi | --mand | --man | --ma | --m) + ac_prev=mandir ;; + -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*) + mandir=$ac_optarg ;; + + -nfp | --nfp | --nf) + # Obsolete; use --without-fp. 
+ with_fp=no ;; + + -no-create | --no-create | --no-creat | --no-crea | --no-cre \ + | --no-cr | --no-c | -n) + no_create=yes ;; + + -no-recursion | --no-recursion | --no-recursio | --no-recursi \ + | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r) + no_recursion=yes ;; + + -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \ + | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \ + | --oldin | --oldi | --old | --ol | --o) + ac_prev=oldincludedir ;; + -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \ + | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \ + | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*) + oldincludedir=$ac_optarg ;; + + -prefix | --prefix | --prefi | --pref | --pre | --pr | --p) + ac_prev=prefix ;; + -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*) + prefix=$ac_optarg ;; + + -program-prefix | --program-prefix | --program-prefi | --program-pref \ + | --program-pre | --program-pr | --program-p) + ac_prev=program_prefix ;; + -program-prefix=* | --program-prefix=* | --program-prefi=* \ + | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*) + program_prefix=$ac_optarg ;; + + -program-suffix | --program-suffix | --program-suffi | --program-suff \ + | --program-suf | --program-su | --program-s) + ac_prev=program_suffix ;; + -program-suffix=* | --program-suffix=* | --program-suffi=* \ + | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*) + program_suffix=$ac_optarg ;; + + -program-transform-name | --program-transform-name \ + | --program-transform-nam | --program-transform-na \ + | --program-transform-n | --program-transform- \ + | --program-transform | --program-transfor \ + | --program-transfo | --program-transf \ + | --program-trans | --program-tran \ + | --progr-tra | --program-tr | --program-t) + ac_prev=program_transform_name ;; + -program-transform-name=* | --program-transform-name=* \ + | 
--program-transform-nam=* | --program-transform-na=* \ + | --program-transform-n=* | --program-transform-=* \ + | --program-transform=* | --program-transfor=* \ + | --program-transfo=* | --program-transf=* \ + | --program-trans=* | --program-tran=* \ + | --progr-tra=* | --program-tr=* | --program-t=*) + program_transform_name=$ac_optarg ;; + + -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd) + ac_prev=pdfdir ;; + -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*) + pdfdir=$ac_optarg ;; + + -psdir | --psdir | --psdi | --psd | --ps) + ac_prev=psdir ;; + -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*) + psdir=$ac_optarg ;; + + -q | -quiet | --quiet | --quie | --qui | --qu | --q \ + | -silent | --silent | --silen | --sile | --sil) + silent=yes ;; + + -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) + ac_prev=sbindir ;; + -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ + | --sbi=* | --sb=*) + sbindir=$ac_optarg ;; + + -sharedstatedir | --sharedstatedir | --sharedstatedi \ + | --sharedstated | --sharedstate | --sharedstat | --sharedsta \ + | --sharedst | --shareds | --shared | --share | --shar \ + | --sha | --sh) + ac_prev=sharedstatedir ;; + -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \ + | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \ + | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \ + | --sha=* | --sh=*) + sharedstatedir=$ac_optarg ;; + + -site | --site | --sit) + ac_prev=site ;; + -site=* | --site=* | --sit=*) + site=$ac_optarg ;; + + -srcdir | --srcdir | --srcdi | --srcd | --src | --sr) + ac_prev=srcdir ;; + -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*) + srcdir=$ac_optarg ;; + + -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \ + | --syscon | --sysco | --sysc | --sys | --sy) + ac_prev=sysconfdir ;; + -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \ + | --syscon=* | --sysco=* | --sysc=* 
| --sys=* | --sy=*) + sysconfdir=$ac_optarg ;; + + -target | --target | --targe | --targ | --tar | --ta | --t) + ac_prev=target_alias ;; + -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*) + target_alias=$ac_optarg ;; + + -v | -verbose | --verbose | --verbos | --verbo | --verb) + verbose=yes ;; + + -version | --version | --versio | --versi | --vers | -V) + ac_init_version=: ;; + + -with-* | --with-*) + ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? "invalid package name: $ac_useropt" + ac_useropt_orig=$ac_useropt + ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"with_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval with_$ac_useropt=\$ac_optarg ;; + + -without-* | --without-*) + ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'` + # Reject names that are not valid shell variable names. + expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && + as_fn_error $? "invalid package name: $ac_useropt" + ac_useropt_orig=$ac_useropt + ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` + case $ac_user_opts in + *" +"with_$ac_useropt" +"*) ;; + *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig" + ac_unrecognized_sep=', ';; + esac + eval with_$ac_useropt=no ;; + + --x) + # Obsolete; use --with-x. 
+ with_x=yes ;; + + -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \ + | --x-incl | --x-inc | --x-in | --x-i) + ac_prev=x_includes ;; + -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \ + | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*) + x_includes=$ac_optarg ;; + + -x-libraries | --x-libraries | --x-librarie | --x-librari \ + | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l) + ac_prev=x_libraries ;; + -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \ + | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*) + x_libraries=$ac_optarg ;; + + -*) as_fn_error $? "unrecognized option: \`$ac_option' +Try \`$0 --help' for more information" + ;; + + *=*) + ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='` + # Reject names that are not valid shell variable names. + case $ac_envvar in #( + '' | [0-9]* | *[!_$as_cr_alnum]* ) + as_fn_error $? "invalid variable name: \`$ac_envvar'" ;; + esac + eval $ac_envvar=\$ac_optarg + export $ac_envvar ;; + + *) + # FIXME: should be removed in autoconf 3.0. + $as_echo "$as_me: WARNING: you should use --build, --host, --target" >&2 + expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null && + $as_echo "$as_me: WARNING: invalid host type: $ac_option" >&2 + : "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}" + ;; + + esac +done + +if test -n "$ac_prev"; then + ac_option=--`echo $ac_prev | sed 's/_/-/g'` + as_fn_error $? "missing argument to $ac_option" +fi + +if test -n "$ac_unrecognized_opts"; then + case $enable_option_checking in + no) ;; + fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;; + *) $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;; + esac +fi + +# Check all directory arguments for consistency. 
+for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \ + datadir sysconfdir sharedstatedir localstatedir includedir \ + oldincludedir docdir infodir htmldir dvidir pdfdir psdir \ + libdir localedir mandir +do + eval ac_val=\$$ac_var + # Remove trailing slashes. + case $ac_val in + */ ) + ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'` + eval $ac_var=\$ac_val;; + esac + # Be sure to have absolute directory names. + case $ac_val in + [\\/$]* | ?:[\\/]* ) continue;; + NONE | '' ) case $ac_var in *prefix ) continue;; esac;; + esac + as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val" +done + +# There might be people who depend on the old broken behavior: `$host' +# used to hold the argument of --host etc. +# FIXME: To remove some day. +build=$build_alias +host=$host_alias +target=$target_alias + +# FIXME: To remove some day. +if test "x$host_alias" != x; then + if test "x$build_alias" = x; then + cross_compiling=maybe + elif test "x$build_alias" != "x$host_alias"; then + cross_compiling=yes + fi +fi + +ac_tool_prefix= +test -n "$host_alias" && ac_tool_prefix=$host_alias- + +test "$silent" = yes && exec 6>/dev/null + + +ac_pwd=`pwd` && test -n "$ac_pwd" && +ac_ls_di=`ls -di .` && +ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` || + as_fn_error $? "working directory cannot be determined" +test "X$ac_ls_di" = "X$ac_pwd_ls_di" || + as_fn_error $? "pwd does not report name of working directory" + + +# Find the source files, if location was not specified. +if test -z "$srcdir"; then + ac_srcdir_defaulted=yes + # Try the directory containing this script, then the parent directory. + ac_confdir=`$as_dirname -- "$as_myself" || +$as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$as_myself" : 'X\(//\)[^/]' \| \ + X"$as_myself" : 'X\(//\)$' \| \ + X"$as_myself" : 'X\(/\)' \| . 
2>/dev/null || +$as_echo X"$as_myself" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + srcdir=$ac_confdir + if test ! -r "$srcdir/$ac_unique_file"; then + srcdir=.. + fi +else + ac_srcdir_defaulted=no +fi +if test ! -r "$srcdir/$ac_unique_file"; then + test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .." + as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir" +fi +ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work" +ac_abs_confdir=`( + cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg" + pwd)` +# When building in place, set srcdir=. +if test "$ac_abs_confdir" = "$ac_pwd"; then + srcdir=. +fi +# Remove unnecessary trailing slashes from srcdir. +# Double slashes in file names in object file debugging info +# mess up M-x gdb in Emacs. +case $srcdir in +*/) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;; +esac +for ac_var in $ac_precious_vars; do + eval ac_env_${ac_var}_set=\${${ac_var}+set} + eval ac_env_${ac_var}_value=\$${ac_var} + eval ac_cv_env_${ac_var}_set=\${${ac_var}+set} + eval ac_cv_env_${ac_var}_value=\$${ac_var} +done + +# +# Report the --help message. +# +if test "$ac_init_help" = "long"; then + # Omit some internal or obsolete options to make the list less imposing. + # This message is too long to be a string in the A/UX 3.1 sh. + cat <<_ACEOF +\`configure' configures Apache CouchDB 1.4.0 to adapt to many kinds of systems. + +Usage: $0 [OPTION]... [VAR=VALUE]... + +To assign environment variables (e.g., CC, CFLAGS...), specify them as +VAR=VALUE. See below for descriptions of some of the useful variables. + +Defaults for the options are specified in brackets. 
+ +Configuration: + -h, --help display this help and exit + --help=short display options specific to this package + --help=recursive display the short help of all the included packages + -V, --version display version information and exit + -q, --quiet, --silent do not print \`checking ...' messages + --cache-file=FILE cache test results in FILE [disabled] + -C, --config-cache alias for \`--cache-file=config.cache' + -n, --no-create do not create output files + --srcdir=DIR find the sources in DIR [configure dir or \`..'] + +Installation directories: + --prefix=PREFIX install architecture-independent files in PREFIX + [$ac_default_prefix] + --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX + [PREFIX] + +By default, \`make install' will install all the files in +\`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify +an installation prefix other than \`$ac_default_prefix' using \`--prefix', +for instance \`--prefix=\$HOME'. + +For better control, use the options below. 
+ +Fine tuning of the installation directories: + --bindir=DIR user executables [EPREFIX/bin] + --sbindir=DIR system admin executables [EPREFIX/sbin] + --libexecdir=DIR program executables [EPREFIX/libexec] + --sysconfdir=DIR read-only single-machine data [PREFIX/etc] + --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] + --localstatedir=DIR modifiable single-machine data [PREFIX/var] + --libdir=DIR object code libraries [EPREFIX/lib] + --includedir=DIR C header files [PREFIX/include] + --oldincludedir=DIR C header files for non-gcc [/usr/include] + --datarootdir=DIR read-only arch.-independent data root [PREFIX/share] + --datadir=DIR read-only architecture-independent data [DATAROOTDIR] + --infodir=DIR info documentation [DATAROOTDIR/info] + --localedir=DIR locale-dependent data [DATAROOTDIR/locale] + --mandir=DIR man documentation [DATAROOTDIR/man] + --docdir=DIR documentation root [DATAROOTDIR/doc/apache-couchdb] + --htmldir=DIR html documentation [DOCDIR] + --dvidir=DIR dvi documentation [DOCDIR] + --pdfdir=DIR pdf documentation [DOCDIR] + --psdir=DIR ps documentation [DOCDIR] +_ACEOF + + cat <<\_ACEOF + +Program names: + --program-prefix=PREFIX prepend PREFIX to installed program names + --program-suffix=SUFFIX append SUFFIX to installed program names + --program-transform-name=PROGRAM run sed PROGRAM on installed program names + +System types: + --build=BUILD configure for building on BUILD [guessed] + --host=HOST cross-compile to build programs to run on HOST [BUILD] +_ACEOF +fi + +if test -n "$ac_init_help"; then + case $ac_init_help in + short | recursive ) echo "Configuration of Apache CouchDB 1.4.0:";; + esac + cat <<\_ACEOF + +Optional Features: + --disable-option-checking ignore unrecognized --enable/--with options + --disable-FEATURE do not include FEATURE (same as --enable-FEATURE=no) + --enable-FEATURE[=ARG] include FEATURE [ARG=yes] + --disable-dependency-tracking speeds up one-time build + --enable-dependency-tracking do 
not reject slow dependency extractors + --enable-shared[=PKGS] build shared libraries [default=yes] + --enable-static[=PKGS] build static libraries [default=no] + --enable-fast-install[=PKGS] + optimize for fast installation [default=yes] + --disable-libtool-lock avoid locking (might break parallel builds) + --enable-js-trunk allow use of SpiderMonkey versions newer than + js185-1.0.0 + + --disable-init don't install init script where applicable + + --disable-launchd don't install launchd configuration where applicable + + --enable-native-mochijson + compile mochijson to native code (EXPERIMENTAL) + + --disable-tests skip tests during build + + --disable-docs skip docs during build + + --enable-strictness exit when optional checks fail + + +Optional Packages: + --with-PACKAGE[=ARG] use PACKAGE [ARG=yes] + --without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no) + --with-pic try to use only PIC/non-PIC objects [default=use + both] + --with-gnu-ld assume the C compiler uses GNU ld [default=no] + --with-sysroot=DIR Search for dependent libraries within DIR + (or the compiler's sysroot if not specified). + --with-erlang=PATH set PATH to the Erlang include directory + + --with-js-include=PATH set PATH to the SpiderMonkey include directory + + --with-js-lib=PATH set PATH to the SpiderMonkey library directory + + --with-openssl-bin-dir=PATH + path to the open ssl binaries for distribution on + Windows + + --with-msvc-redist-dir=PATH + path to the msvc redistributables for the Windows + platform + + --with-win32-icu-binaries=PATH + set PATH to the Win32 native ICU binaries directory + + --with-win32-curl=PATH set PATH to the Win32 native curl directory + + --with-curl-prefix=PREFIX + Prefix where curl is installed (optional) + --with-curl-exec-prefix=EPREFIX + Exec prefix where curl is installed (optional) + +Some influential environment variables: + CC C compiler command + CFLAGS C compiler flags + LDFLAGS linker flags, e.g. 
-L if you have libraries in a + nonstandard directory + LIBS libraries to pass to the linker, e.g. -l + CPPFLAGS (Objective) C/C++ preprocessor flags, e.g. -I if + you have headers in a nonstandard directory + CPP C preprocessor + PKG_CONFIG path to pkg-config utility + PKG_CONFIG_PATH + directories to add to pkg-config's search path + PKG_CONFIG_LIBDIR + path overriding pkg-config's built-in search path + CXX C++ compiler command + CXXFLAGS C++ compiler flags + CXXCPP C++ preprocessor + JS_CFLAGS C compiler flags for JS, overriding pkg-config + JS_LIBS linker flags for JS, overriding pkg-config + ERLC_FLAGS general flags to prepend to ERLC_FLAGS + FLAGS general flags to prepend to LDFLAGS and CPPFLAGS + CURL_CONFIG config script used for curl + CURL_CFLAGS CFLAGS used for curl + CURL_LIBS LIBS used for curl + ERL path to the `erl' executable + ERLC path to the `erlc' executable + +Use these variables to override the choices made by `configure' or to help +it to find libraries and programs with nonstandard names/locations. + +Report bugs to . +_ACEOF +ac_status=$? +fi + +if test "$ac_init_help" = "recursive"; then + # If there are subdirs, report their specific --help. + for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue + test -d "$ac_dir" || + { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } || + continue + ac_builddir=. + +case "$ac_dir" in +.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; +*) + ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` + # A ".." for each directory in $ac_dir_suffix. + ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` + case $ac_top_builddir_sub in + "") ac_top_builddir_sub=. ac_top_build_prefix= ;; + *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; + esac ;; +esac +ac_abs_top_builddir=$ac_pwd +ac_abs_builddir=$ac_pwd$ac_dir_suffix +# for backward compatibility: +ac_top_builddir=$ac_top_build_prefix + +case $srcdir in + .) # We are building in place. 
+ ac_srcdir=. + ac_top_srcdir=$ac_top_builddir_sub + ac_abs_top_srcdir=$ac_pwd ;; + [\\/]* | ?:[\\/]* ) # Absolute name. + ac_srcdir=$srcdir$ac_dir_suffix; + ac_top_srcdir=$srcdir + ac_abs_top_srcdir=$srcdir ;; + *) # Relative name. + ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix + ac_top_srcdir=$ac_top_build_prefix$srcdir + ac_abs_top_srcdir=$ac_pwd/$srcdir ;; +esac +ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix + + cd "$ac_dir" || { ac_status=$?; continue; } + # Check for guested configure. + if test -f "$ac_srcdir/configure.gnu"; then + echo && + $SHELL "$ac_srcdir/configure.gnu" --help=recursive + elif test -f "$ac_srcdir/configure"; then + echo && + $SHELL "$ac_srcdir/configure" --help=recursive + else + $as_echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2 + fi || ac_status=$? + cd "$ac_pwd" || { ac_status=$?; break; } + done +fi + +test -n "$ac_init_help" && exit $ac_status +if $ac_init_version; then + cat <<\_ACEOF +Apache CouchDB configure 1.4.0 +generated by GNU Autoconf 2.69 + +Copyright (C) 2012 Free Software Foundation, Inc. +This configure script is free software; the Free Software Foundation +gives unlimited permission to copy, distribute and modify it. +_ACEOF + exit +fi + +## ------------------------ ## +## Autoconf initialization. ## +## ------------------------ ## + +# ac_fn_c_try_compile LINENO +# -------------------------- +# Try to compile conftest.$ac_ext, and return whether this succeeded. +ac_fn_c_try_compile () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + rm -f conftest.$ac_objext + if { { ac_try="$ac_compile" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_compile") 2>conftest.err + ac_status=$? 
+ if test -s conftest.err; then + grep -v '^ *+' conftest.err >conftest.er1 + cat conftest.er1 >&5 + mv -f conftest.er1 conftest.err + fi + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && { + test -z "$ac_c_werror_flag" || + test ! -s conftest.err + } && test -s conftest.$ac_objext; then : + ac_retval=0 +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=1 +fi + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_c_try_compile + +# ac_fn_c_try_cpp LINENO +# ---------------------- +# Try to preprocess conftest.$ac_ext, and return whether this succeeded. +ac_fn_c_try_cpp () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + if { { ac_try="$ac_cpp conftest.$ac_ext" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_cpp conftest.$ac_ext") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + grep -v '^ *+' conftest.err >conftest.er1 + cat conftest.er1 >&5 + mv -f conftest.er1 conftest.err + fi + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } > conftest.i && { + test -z "$ac_c_preproc_warn_flag$ac_c_werror_flag" || + test ! -s conftest.err + }; then : + ac_retval=0 +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=1 +fi + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_c_try_cpp + +# ac_fn_c_check_header_mongrel LINENO HEADER VAR INCLUDES +# ------------------------------------------------------- +# Tests whether HEADER exists, giving a warning if it cannot be compiled using +# the include files in INCLUDES and setting the cache variable VAR +# accordingly. 
+ac_fn_c_check_header_mongrel () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + if eval \${$3+:} false; then : + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 +$as_echo_n "checking for $2... " >&6; } +if eval \${$3+:} false; then : + $as_echo_n "(cached) " >&6 +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +else + # Is the header compilable? +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 usability" >&5 +$as_echo_n "checking $2 usability... " >&6; } +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +$4 +#include <$2> +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_header_compiler=yes +else + ac_header_compiler=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_compiler" >&5 +$as_echo "$ac_header_compiler" >&6; } + +# Is the header present? +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 presence" >&5 +$as_echo_n "checking $2 presence... " >&6; } +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include <$2> +_ACEOF +if ac_fn_c_try_cpp "$LINENO"; then : + ac_header_preproc=yes +else + ac_header_preproc=no +fi +rm -f conftest.err conftest.i conftest.$ac_ext +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_preproc" >&5 +$as_echo "$ac_header_preproc" >&6; } + +# So? What about this header? +case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in #(( + yes:no: ) + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&5 +$as_echo "$as_me: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" 
>&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 +$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} + ;; + no:yes:* ) + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: present but cannot be compiled" >&5 +$as_echo "$as_me: WARNING: $2: present but cannot be compiled" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: check for missing prerequisite headers?" >&5 +$as_echo "$as_me: WARNING: $2: check for missing prerequisite headers?" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: see the Autoconf documentation" >&5 +$as_echo "$as_me: WARNING: $2: see the Autoconf documentation" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&5 +$as_echo "$as_me: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 +$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} +( $as_echo "## ------------------------------------------------------------ ## +## Report this to https://issues.apache.org/jira/browse/COUCHDB ## +## ------------------------------------------------------------ ##" + ) | sed "s/^/$as_me: WARNING: /" >&2 + ;; +esac + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 +$as_echo_n "checking for $2... " >&6; } +if eval \${$3+:} false; then : + $as_echo_n "(cached) " >&6 +else + eval "$3=\$ac_header_compiler" +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +fi + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + +} # ac_fn_c_check_header_mongrel + +# ac_fn_c_try_run LINENO +# ---------------------- +# Try to link conftest.$ac_ext, and return whether this succeeded. Assumes +# that executables *can* be run. 
+ac_fn_c_try_run () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + if { { ac_try="$ac_link" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && { ac_try='./conftest$ac_exeext' + { { case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_try") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; }; then : + ac_retval=0 +else + $as_echo "$as_me: program exited with status $ac_status" >&5 + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=$ac_status +fi + rm -rf conftest.dSYM conftest_ipa8_conftest.oo + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_c_try_run + +# ac_fn_c_check_header_compile LINENO HEADER VAR INCLUDES +# ------------------------------------------------------- +# Tests whether HEADER exists and can be compiled using the include files in +# INCLUDES, setting the cache variable VAR accordingly. +ac_fn_c_check_header_compile () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 +$as_echo_n "checking for $2... " >&6; } +if eval \${$3+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ +$4 +#include <$2> +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + eval "$3=yes" +else + eval "$3=no" +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + +} # ac_fn_c_check_header_compile + +# ac_fn_c_try_link LINENO +# ----------------------- +# Try to link conftest.$ac_ext, and return whether this succeeded. +ac_fn_c_try_link () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + rm -f conftest.$ac_objext conftest$ac_exeext + if { { ac_try="$ac_link" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + grep -v '^ *+' conftest.err >conftest.er1 + cat conftest.er1 >&5 + mv -f conftest.er1 conftest.err + fi + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && { + test -z "$ac_c_werror_flag" || + test ! -s conftest.err + } && test -s conftest$ac_exeext && { + test "$cross_compiling" = yes || + test -x conftest$ac_exeext + }; then : + ac_retval=0 +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=1 +fi + # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information + # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would + # interfere with the next link command; also delete a directory that is + # left behind by Apple's compiler. We do this before executing the actions. 
+ rm -rf conftest.dSYM conftest_ipa8_conftest.oo + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_c_try_link + +# ac_fn_c_check_func LINENO FUNC VAR +# ---------------------------------- +# Tests whether FUNC exists, setting the cache variable VAR accordingly +ac_fn_c_check_func () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 +$as_echo_n "checking for $2... " >&6; } +if eval \${$3+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +/* Define $2 to an innocuous variant, in case declares $2. + For example, HP-UX 11i declares gettimeofday. */ +#define $2 innocuous_$2 + +/* System header to define __stub macros and hopefully few prototypes, + which can conflict with char $2 (); below. + Prefer to if __STDC__ is defined, since + exists even on freestanding compilers. */ + +#ifdef __STDC__ +# include +#else +# include +#endif + +#undef $2 + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. */ +#ifdef __cplusplus +extern "C" +#endif +char $2 (); +/* The GNU C library defines this for functions which it implements + to always fail with ENOSYS. Some functions are actually named + something starting with __ and the normal name is an alias. 
*/ +#if defined __stub_$2 || defined __stub___$2 +choke me +#endif + +int +main () +{ +return $2 (); + ; + return 0; +} +_ACEOF +if ac_fn_c_try_link "$LINENO"; then : + eval "$3=yes" +else + eval "$3=no" +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + +} # ac_fn_c_check_func + +# ac_fn_cxx_try_compile LINENO +# ---------------------------- +# Try to compile conftest.$ac_ext, and return whether this succeeded. +ac_fn_cxx_try_compile () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + rm -f conftest.$ac_objext + if { { ac_try="$ac_compile" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_compile") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + grep -v '^ *+' conftest.err >conftest.er1 + cat conftest.er1 >&5 + mv -f conftest.er1 conftest.err + fi + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && { + test -z "$ac_cxx_werror_flag" || + test ! -s conftest.err + } && test -s conftest.$ac_objext; then : + ac_retval=0 +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=1 +fi + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_cxx_try_compile + +# ac_fn_cxx_try_cpp LINENO +# ------------------------ +# Try to preprocess conftest.$ac_ext, and return whether this succeeded. 
+ac_fn_cxx_try_cpp () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + if { { ac_try="$ac_cpp conftest.$ac_ext" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_cpp conftest.$ac_ext") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + grep -v '^ *+' conftest.err >conftest.er1 + cat conftest.er1 >&5 + mv -f conftest.er1 conftest.err + fi + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } > conftest.i && { + test -z "$ac_cxx_preproc_warn_flag$ac_cxx_werror_flag" || + test ! -s conftest.err + }; then : + ac_retval=0 +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=1 +fi + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_cxx_try_cpp + +# ac_fn_cxx_try_link LINENO +# ------------------------- +# Try to link conftest.$ac_ext, and return whether this succeeded. +ac_fn_cxx_try_link () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + rm -f conftest.$ac_objext conftest$ac_exeext + if { { ac_try="$ac_link" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + grep -v '^ *+' conftest.err >conftest.er1 + cat conftest.er1 >&5 + mv -f conftest.er1 conftest.err + fi + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && { + test -z "$ac_cxx_werror_flag" || + test ! 
-s conftest.err + } && test -s conftest$ac_exeext && { + test "$cross_compiling" = yes || + test -x conftest$ac_exeext + }; then : + ac_retval=0 +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=1 +fi + # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information + # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would + # interfere with the next link command; also delete a directory that is + # left behind by Apple's compiler. We do this before executing the actions. + rm -rf conftest.dSYM conftest_ipa8_conftest.oo + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_cxx_try_link + +# ac_fn_cxx_try_run LINENO +# ------------------------ +# Try to link conftest.$ac_ext, and return whether this succeeded. Assumes +# that executables *can* be run. +ac_fn_cxx_try_run () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + if { { ac_try="$ac_link" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && { ac_try='./conftest$ac_exeext' + { { case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_try") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 + test $ac_status = 0; }; }; then : + ac_retval=0 +else + $as_echo "$as_me: program exited with status $ac_status" >&5 + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 + + ac_retval=$ac_status +fi + rm -rf conftest.dSYM conftest_ipa8_conftest.oo + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + as_fn_set_status $ac_retval + +} # ac_fn_cxx_try_run + +# ac_fn_cxx_check_header_mongrel LINENO HEADER VAR INCLUDES +# --------------------------------------------------------- +# Tests whether HEADER exists, giving a warning if it cannot be compiled using +# the include files in INCLUDES and setting the cache variable VAR +# accordingly. +ac_fn_cxx_check_header_mongrel () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + if eval \${$3+:} false; then : + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 +$as_echo_n "checking for $2... " >&6; } +if eval \${$3+:} false; then : + $as_echo_n "(cached) " >&6 +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +else + # Is the header compilable? +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 usability" >&5 +$as_echo_n "checking $2 usability... " >&6; } +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +$4 +#include <$2> +_ACEOF +if ac_fn_cxx_try_compile "$LINENO"; then : + ac_header_compiler=yes +else + ac_header_compiler=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_compiler" >&5 +$as_echo "$ac_header_compiler" >&6; } + +# Is the header present? +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 presence" >&5 +$as_echo_n "checking $2 presence... " >&6; } +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ +#include <$2> +_ACEOF +if ac_fn_cxx_try_cpp "$LINENO"; then : + ac_header_preproc=yes +else + ac_header_preproc=no +fi +rm -f conftest.err conftest.i conftest.$ac_ext +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_preproc" >&5 +$as_echo "$ac_header_preproc" >&6; } + +# So? What about this header? +case $ac_header_compiler:$ac_header_preproc:$ac_cxx_preproc_warn_flag in #(( + yes:no: ) + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&5 +$as_echo "$as_me: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 +$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} + ;; + no:yes:* ) + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: present but cannot be compiled" >&5 +$as_echo "$as_me: WARNING: $2: present but cannot be compiled" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: check for missing prerequisite headers?" >&5 +$as_echo "$as_me: WARNING: $2: check for missing prerequisite headers?" 
>&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: see the Autoconf documentation" >&5 +$as_echo "$as_me: WARNING: $2: see the Autoconf documentation" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&5 +$as_echo "$as_me: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 +$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} +( $as_echo "## ------------------------------------------------------------ ## +## Report this to https://issues.apache.org/jira/browse/COUCHDB ## +## ------------------------------------------------------------ ##" + ) | sed "s/^/$as_me: WARNING: /" >&2 + ;; +esac + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 +$as_echo_n "checking for $2... " >&6; } +if eval \${$3+:} false; then : + $as_echo_n "(cached) " >&6 +else + eval "$3=\$ac_header_compiler" +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +fi + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + +} # ac_fn_cxx_check_header_mongrel + +# ac_fn_cxx_check_func LINENO FUNC VAR +# ------------------------------------ +# Tests whether FUNC exists, setting the cache variable VAR accordingly +ac_fn_cxx_check_func () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 +$as_echo_n "checking for $2... " >&6; } +if eval \${$3+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +/* Define $2 to an innocuous variant, in case declares $2. + For example, HP-UX 11i declares gettimeofday. */ +#define $2 innocuous_$2 + +/* System header to define __stub macros and hopefully few prototypes, + which can conflict with char $2 (); below. 
+ Prefer to if __STDC__ is defined, since + exists even on freestanding compilers. */ + +#ifdef __STDC__ +# include +#else +# include +#endif + +#undef $2 + +/* Override any GCC internal prototype to avoid an error. + Use char because int might match the return type of a GCC + builtin and then its argument prototype would still apply. */ +#ifdef __cplusplus +extern "C" +#endif +char $2 (); +/* The GNU C library defines this for functions which it implements + to always fail with ENOSYS. Some functions are actually named + something starting with __ and the normal name is an alias. */ +#if defined __stub_$2 || defined __stub___$2 +choke me +#endif + +int +main () +{ +return $2 (); + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_link "$LINENO"; then : + eval "$3=yes" +else + eval "$3=no" +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + +} # ac_fn_cxx_check_func + +# ac_fn_cxx_check_type LINENO TYPE VAR INCLUDES +# --------------------------------------------- +# Tests whether TYPE exists after having included INCLUDES, setting cache +# variable VAR accordingly. +ac_fn_cxx_check_type () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 +$as_echo_n "checking for $2... " >&6; } +if eval \${$3+:} false; then : + $as_echo_n "(cached) " >&6 +else + eval "$3=no" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +$4 +int +main () +{ +if (sizeof ($2)) + return 0; + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_compile "$LINENO"; then : + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ +$4 +int +main () +{ +if (sizeof (($2))) + return 0; + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_compile "$LINENO"; then : + +else + eval "$3=yes" +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + +} # ac_fn_cxx_check_type +cat >config.log <<_ACEOF +This file contains any messages produced by compilers while +running configure, to aid debugging if configure makes a mistake. + +It was created by Apache CouchDB $as_me 1.4.0, which was +generated by GNU Autoconf 2.69. Invocation command line was + + $ $0 $@ + +_ACEOF +exec 5>>config.log +{ +cat <<_ASUNAME +## --------- ## +## Platform. ## +## --------- ## + +hostname = `(hostname || uname -n) 2>/dev/null | sed 1q` +uname -m = `(uname -m) 2>/dev/null || echo unknown` +uname -r = `(uname -r) 2>/dev/null || echo unknown` +uname -s = `(uname -s) 2>/dev/null || echo unknown` +uname -v = `(uname -v) 2>/dev/null || echo unknown` + +/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown` +/bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown` + +/bin/arch = `(/bin/arch) 2>/dev/null || echo unknown` +/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown` +/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown` +/usr/bin/hostinfo = `(/usr/bin/hostinfo) 2>/dev/null || echo unknown` +/bin/machine = `(/bin/machine) 2>/dev/null || echo unknown` +/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown` +/bin/universe = `(/bin/universe) 2>/dev/null || echo unknown` + +_ASUNAME + +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ $as_echo "PATH: $as_dir" + done +IFS=$as_save_IFS + +} >&5 + +cat >&5 <<_ACEOF + + +## ----------- ## +## Core tests. ## +## ----------- ## + +_ACEOF + + +# Keep a trace of the command line. +# Strip out --no-create and --no-recursion so they do not pile up. +# Strip out --silent because we don't want to record it for future runs. +# Also quote any args containing shell meta-characters. +# Make two passes to allow for proper duplicate-argument suppression. +ac_configure_args= +ac_configure_args0= +ac_configure_args1= +ac_must_keep_next=false +for ac_pass in 1 2 +do + for ac_arg + do + case $ac_arg in + -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;; + -q | -quiet | --quiet | --quie | --qui | --qu | --q \ + | -silent | --silent | --silen | --sile | --sil) + continue ;; + *\'*) + ac_arg=`$as_echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; + esac + case $ac_pass in + 1) as_fn_append ac_configure_args0 " '$ac_arg'" ;; + 2) + as_fn_append ac_configure_args1 " '$ac_arg'" + if test $ac_must_keep_next = true; then + ac_must_keep_next=false # Got value, back to normal. + else + case $ac_arg in + *=* | --config-cache | -C | -disable-* | --disable-* \ + | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \ + | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \ + | -with-* | --with-* | -without-* | --without-* | --x) + case "$ac_configure_args0 " in + "$ac_configure_args1"*" '$ac_arg' "* ) continue ;; + esac + ;; + -* ) ac_must_keep_next=true ;; + esac + fi + as_fn_append ac_configure_args " '$ac_arg'" + ;; + esac + done +done +{ ac_configure_args0=; unset ac_configure_args0;} +{ ac_configure_args1=; unset ac_configure_args1;} + +# When interrupted or exit'd, cleanup temporary files, and complete +# config.log. We remove comments because anyway the quotes in there +# would cause problems or look ugly. +# WARNING: Use '\'' to represent an apostrophe within the trap. +# WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug. 
+trap 'exit_status=$? + # Save into config.log some information that might help in debugging. + { + echo + + $as_echo "## ---------------- ## +## Cache variables. ## +## ---------------- ##" + echo + # The following way of writing the cache mishandles newlines in values, +( + for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do + eval ac_val=\$$ac_var + case $ac_val in #( + *${as_nl}*) + case $ac_var in #( + *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 +$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; + esac + case $ac_var in #( + _ | IFS | as_nl) ;; #( + BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( + *) { eval $ac_var=; unset $ac_var;} ;; + esac ;; + esac + done + (set) 2>&1 | + case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #( + *${as_nl}ac_space=\ *) + sed -n \ + "s/'\''/'\''\\\\'\'''\''/g; + s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p" + ;; #( + *) + sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" + ;; + esac | + sort +) + echo + + $as_echo "## ----------------- ## +## Output variables. ## +## ----------------- ##" + echo + for ac_var in $ac_subst_vars + do + eval ac_val=\$$ac_var + case $ac_val in + *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; + esac + $as_echo "$ac_var='\''$ac_val'\''" + done | sort + echo + + if test -n "$ac_subst_files"; then + $as_echo "## ------------------- ## +## File substitutions. ## +## ------------------- ##" + echo + for ac_var in $ac_subst_files + do + eval ac_val=\$$ac_var + case $ac_val in + *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; + esac + $as_echo "$ac_var='\''$ac_val'\''" + done | sort + echo + fi + + if test -s confdefs.h; then + $as_echo "## ----------- ## +## confdefs.h. 
## +## ----------- ##" + echo + cat confdefs.h + echo + fi + test "$ac_signal" != 0 && + $as_echo "$as_me: caught signal $ac_signal" + $as_echo "$as_me: exit $exit_status" + } >&5 + rm -f core *.core core.conftest.* && + rm -f -r conftest* confdefs* conf$$* $ac_clean_files && + exit $exit_status +' 0 +for ac_signal in 1 2 13 15; do + trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal +done +ac_signal=0 + +# confdefs.h avoids OS command line length limits that DEFS can exceed. +rm -f -r conftest* confdefs.h + +$as_echo "/* confdefs.h */" > confdefs.h + +# Predefined preprocessor variables. + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_NAME "$PACKAGE_NAME" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_TARNAME "$PACKAGE_TARNAME" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_VERSION "$PACKAGE_VERSION" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_STRING "$PACKAGE_STRING" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT" +_ACEOF + +cat >>confdefs.h <<_ACEOF +#define PACKAGE_URL "$PACKAGE_URL" +_ACEOF + + +# Let the site file select an alternate cache file if it wants to. +# Prefer an explicitly selected file to automatically selected ones. +ac_site_file1=NONE +ac_site_file2=NONE +if test -n "$CONFIG_SITE"; then + # We do not want a PATH search for config.site. 
+ case $CONFIG_SITE in #(( + -*) ac_site_file1=./$CONFIG_SITE;; + */*) ac_site_file1=$CONFIG_SITE;; + *) ac_site_file1=./$CONFIG_SITE;; + esac +elif test "x$prefix" != xNONE; then + ac_site_file1=$prefix/share/config.site + ac_site_file2=$prefix/etc/config.site +else + ac_site_file1=$ac_default_prefix/share/config.site + ac_site_file2=$ac_default_prefix/etc/config.site +fi +for ac_site_file in "$ac_site_file1" "$ac_site_file2" +do + test "x$ac_site_file" = xNONE && continue + if test /dev/null != "$ac_site_file" && test -r "$ac_site_file"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5 +$as_echo "$as_me: loading site script $ac_site_file" >&6;} + sed 's/^/| /' "$ac_site_file" >&5 + . "$ac_site_file" \ + || { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "failed to load site script $ac_site_file +See \`config.log' for more details" "$LINENO" 5; } + fi +done + +if test -r "$cache_file"; then + # Some versions of bash will fail to source /dev/null (special files + # actually), so we avoid doing that. DJGPP emulates it as a regular file. + if test /dev/null != "$cache_file" && test -f "$cache_file"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5 +$as_echo "$as_me: loading cache $cache_file" >&6;} + case $cache_file in + [\\/]* | ?:[\\/]* ) . "$cache_file";; + *) . "./$cache_file";; + esac + fi +else + { $as_echo "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5 +$as_echo "$as_me: creating cache $cache_file" >&6;} + >$cache_file +fi + +# Check that the precious variables saved in the cache have kept the same +# value. 
+ac_cache_corrupted=false +for ac_var in $ac_precious_vars; do + eval ac_old_set=\$ac_cv_env_${ac_var}_set + eval ac_new_set=\$ac_env_${ac_var}_set + eval ac_old_val=\$ac_cv_env_${ac_var}_value + eval ac_new_val=\$ac_env_${ac_var}_value + case $ac_old_set,$ac_new_set in + set,) + { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5 +$as_echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;} + ac_cache_corrupted=: ;; + ,set) + { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5 +$as_echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;} + ac_cache_corrupted=: ;; + ,);; + *) + if test "x$ac_old_val" != "x$ac_new_val"; then + # differences in whitespace do not lead to failure. + ac_old_val_w=`echo x $ac_old_val` + ac_new_val_w=`echo x $ac_new_val` + if test "$ac_old_val_w" != "$ac_new_val_w"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5 +$as_echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;} + ac_cache_corrupted=: + else + { $as_echo "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5 +$as_echo "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;} + eval $ac_var=\$ac_old_val + fi + { $as_echo "$as_me:${as_lineno-$LINENO}: former value: \`$ac_old_val'" >&5 +$as_echo "$as_me: former value: \`$ac_old_val'" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: current value: \`$ac_new_val'" >&5 +$as_echo "$as_me: current value: \`$ac_new_val'" >&2;} + fi;; + esac + # Pass precious variables to config.status. + if test "$ac_new_set" = set; then + case $ac_new_val in + *\'*) ac_arg=$ac_var=`$as_echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;; + *) ac_arg=$ac_var=$ac_new_val ;; + esac + case " $ac_configure_args " in + *" '$ac_arg' "*) ;; # Avoid dups. 
Use of quotes ensures accuracy. + *) as_fn_append ac_configure_args " '$ac_arg'" ;; + esac + fi +done +if $ac_cache_corrupted; then + { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5 +$as_echo "$as_me: error: changes in the environment can compromise the build" >&2;} + as_fn_error $? "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5 +fi +## -------------------- ## +## Main body of script. ## +## -------------------- ## + +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu + + + + + +ac_aux_dir= +for ac_dir in build-aux "$srcdir"/build-aux; do + if test -f "$ac_dir/install-sh"; then + ac_aux_dir=$ac_dir + ac_install_sh="$ac_aux_dir/install-sh -c" + break + elif test -f "$ac_dir/install.sh"; then + ac_aux_dir=$ac_dir + ac_install_sh="$ac_aux_dir/install.sh -c" + break + elif test -f "$ac_dir/shtool"; then + ac_aux_dir=$ac_dir + ac_install_sh="$ac_aux_dir/shtool install -c" + break + fi +done +if test -z "$ac_aux_dir"; then + as_fn_error $? "cannot find install-sh, install.sh, or shtool in build-aux \"$srcdir\"/build-aux" "$LINENO" 5 +fi + +# These three variables are undocumented and unsupported, +# and are intended to be withdrawn in a future Autoconf release. +# They can cause serious problems if a builder's source tree is in a directory +# whose full name contains unusual characters. +ac_config_guess="$SHELL $ac_aux_dir/config.guess" # Please don't use this var. +ac_config_sub="$SHELL $ac_aux_dir/config.sub" # Please don't use this var. +ac_configure="$SHELL $ac_aux_dir/configure" # Please don't use this var. 
+ + + + +ac_config_headers="$ac_config_headers config.h" + +ac_config_headers="$ac_config_headers src/snappy/google-snappy/config.h" + + +am__api_version='1.11' + +# Find a good install program. We prefer a C program (faster), +# so one script is as good as another. But avoid the broken or +# incompatible versions: +# SysV /etc/install, /usr/sbin/install +# SunOS /usr/etc/install +# IRIX /sbin/install +# AIX /bin/install +# AmigaOS /C/install, which installs bootblocks on floppy discs +# AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag +# AFS /usr/afsws/bin/install, which mishandles nonexistent args +# SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff" +# OS/2's system install, which has a completely different semantic +# ./install, which can be erroneously created by make from ./install.sh. +# Reject install programs that cannot install multiple files. +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a BSD-compatible install" >&5 +$as_echo_n "checking for a BSD-compatible install... " >&6; } +if test -z "$INSTALL"; then +if ${ac_cv_path_install+:} false; then : + $as_echo_n "(cached) " >&6 +else + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + # Account for people who put trailing slashes in PATH elements. +case $as_dir/ in #(( + ./ | .// | /[cC]/* | \ + /etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \ + ?:[\\/]os2[\\/]install[\\/]* | ?:[\\/]OS2[\\/]INSTALL[\\/]* | \ + /usr/ucb/* ) ;; + *) + # OSF1 and SCO ODT 3.0 have their own names for install. + # Don't use installbsd from OSF since it installs stuff as root + # by default. + for ac_prog in ginstall scoinst install; do + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_prog$ac_exec_ext"; then + if test $ac_prog = install && + grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then + # AIX install. 
It has an incompatible calling convention. + : + elif test $ac_prog = install && + grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then + # program-specific install script used by HP pwplus--don't use. + : + else + rm -rf conftest.one conftest.two conftest.dir + echo one > conftest.one + echo two > conftest.two + mkdir conftest.dir + if "$as_dir/$ac_prog$ac_exec_ext" -c conftest.one conftest.two "`pwd`/conftest.dir" && + test -s conftest.one && test -s conftest.two && + test -s conftest.dir/conftest.one && + test -s conftest.dir/conftest.two + then + ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c" + break 3 + fi + fi + fi + done + done + ;; +esac + + done +IFS=$as_save_IFS + +rm -rf conftest.one conftest.two conftest.dir + +fi + if test "${ac_cv_path_install+set}" = set; then + INSTALL=$ac_cv_path_install + else + # As a last resort, use the slow shell script. Don't cache a + # value for INSTALL within a source directory, because that will + # break other packages using the cache if that directory is + # removed, or if the value is a relative name. + INSTALL=$ac_install_sh + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $INSTALL" >&5 +$as_echo "$INSTALL" >&6; } + +# Use test -z because SunOS4 sh mishandles braces in ${var-val}. +# It thinks the first close brace ends the variable substitution. +test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}' + +test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}' + +test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644' - --config-cache | -C) - cache_file=config.cache ;; +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether build environment is sane" >&5 +$as_echo_n "checking whether build environment is sane... " >&6; } +# Just in case +sleep 1 +echo timestamp > conftest.file +# Reject unsafe characters in $srcdir or the absolute working directory +# name. Accept space and tab only in the latter. +am_lf=' +' +case `pwd` in + *[\\\"\#\$\&\'\`$am_lf]*) + as_fn_error $? 
"unsafe absolute working directory name" "$LINENO" 5;; +esac +case $srcdir in + *[\\\"\#\$\&\'\`$am_lf\ \ ]*) + as_fn_error $? "unsafe srcdir value: \`$srcdir'" "$LINENO" 5;; +esac - -datadir | --datadir | --datadi | --datad) - ac_prev=datadir ;; - -datadir=* | --datadir=* | --datadi=* | --datad=*) - datadir=$ac_optarg ;; +# Do `set' in a subshell so we don't clobber the current shell's +# arguments. Must try -L first in case configure is actually a +# symlink; some systems play weird games with the mod time of symlinks +# (eg FreeBSD returns the mod time of the symlink's containing +# directory). +if ( + set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null` + if test "$*" = "X"; then + # -L didn't work. + set X `ls -t "$srcdir/configure" conftest.file` + fi + rm -f conftest.file + if test "$*" != "X $srcdir/configure conftest.file" \ + && test "$*" != "X conftest.file $srcdir/configure"; then - -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \ - | --dataroo | --dataro | --datar) - ac_prev=datarootdir ;; - -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \ - | --dataroot=* | --dataroo=* | --dataro=* | --datar=*) - datarootdir=$ac_optarg ;; + # If neither matched, then we have a broken ls. This can happen + # if, for instance, CONFIG_SHELL is bash and it inherits a + # broken ls alias from the environment. This has actually + # happened. Such a system could not be considered "sane". + as_fn_error $? "ls -t appears to fail. Make sure there is not a broken +alias in your environment" "$LINENO" 5 + fi - -disable-* | --disable-*) - ac_feature=`expr "x$ac_option" : 'x-*disable-\(.*\)'` - # Reject names that are not valid shell variable names. - expr "x$ac_feature" : ".*[^-._$as_cr_alnum]" >/dev/null && - { echo "$as_me: error: invalid feature name: $ac_feature" >&2 - { (exit 1); exit 1; }; } - ac_feature=`echo $ac_feature | sed 's/[-.]/_/g'` - eval enable_$ac_feature=no ;; + test "$2" = conftest.file + ) +then + # Ok. 
+ : +else + as_fn_error $? "newly created file is older than distributed files! +Check your system clock" "$LINENO" 5 +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } +test "$program_prefix" != NONE && + program_transform_name="s&^&$program_prefix&;$program_transform_name" +# Use a double $ so make ignores it. +test "$program_suffix" != NONE && + program_transform_name="s&\$&$program_suffix&;$program_transform_name" +# Double any \ or $. +# By default was `s,x,x', remove it if useless. +ac_script='s/[\\$]/&&/g;s/;s,x,x,$//' +program_transform_name=`$as_echo "$program_transform_name" | sed "$ac_script"` - -docdir | --docdir | --docdi | --doc | --do) - ac_prev=docdir ;; - -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*) - docdir=$ac_optarg ;; +# expand $ac_aux_dir to an absolute path +am_aux_dir=`cd $ac_aux_dir && pwd` - -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv) - ac_prev=dvidir ;; - -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*) - dvidir=$ac_optarg ;; +if test x"${MISSING+set}" != xset; then + case $am_aux_dir in + *\ * | *\ *) + MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;; + *) + MISSING="\${SHELL} $am_aux_dir/missing" ;; + esac +fi +# Use eval to expand $SHELL +if eval "$MISSING --run true"; then + am_missing_run="$MISSING --run " +else + am_missing_run= + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`missing' script is too old or missing" >&5 +$as_echo "$as_me: WARNING: \`missing' script is too old or missing" >&2;} +fi - -enable-* | --enable-*) - ac_feature=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'` - # Reject names that are not valid shell variable names. 
- expr "x$ac_feature" : ".*[^-._$as_cr_alnum]" >/dev/null && - { echo "$as_me: error: invalid feature name: $ac_feature" >&2 - { (exit 1); exit 1; }; } - ac_feature=`echo $ac_feature | sed 's/[-.]/_/g'` - eval enable_$ac_feature=\$ac_optarg ;; +if test x"${install_sh}" != xset; then + case $am_aux_dir in + *\ * | *\ *) + install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;; + *) + install_sh="\${SHELL} $am_aux_dir/install-sh" + esac +fi - -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \ - | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \ - | --exec | --exe | --ex) - ac_prev=exec_prefix ;; - -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \ - | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \ - | --exec=* | --exe=* | --ex=*) - exec_prefix=$ac_optarg ;; +# Installed binaries are usually stripped using `strip' when the user +# run `make install-strip'. However `strip' might not be the right +# tool to use in cross-compilation environments, therefore Automake +# will honor the `STRIP' environment variable to overrule this program. +if test "$cross_compiling" != no; then + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. +set dummy ${ac_tool_prefix}strip; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_STRIP+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$STRIP"; then + ac_cv_prog_STRIP="$STRIP" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_STRIP="${ac_tool_prefix}strip" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS - -gas | --gas | --ga | --g) - # Obsolete; use --with-gas. - with_gas=yes ;; +fi +fi +STRIP=$ac_cv_prog_STRIP +if test -n "$STRIP"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5 +$as_echo "$STRIP" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi - -help | --help | --hel | --he | -h) - ac_init_help=long ;; - -help=r* | --help=r* | --hel=r* | --he=r* | -hr*) - ac_init_help=recursive ;; - -help=s* | --help=s* | --hel=s* | --he=s* | -hs*) - ac_init_help=short ;; - -host | --host | --hos | --ho) - ac_prev=host_alias ;; - -host=* | --host=* | --hos=* | --ho=*) - host_alias=$ac_optarg ;; +fi +if test -z "$ac_cv_prog_STRIP"; then + ac_ct_STRIP=$STRIP + # Extract the first word of "strip", so it can be a program name with args. +set dummy strip; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_STRIP+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_STRIP"; then + ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_STRIP="strip" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS - -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht) - ac_prev=htmldir ;; - -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \ - | --ht=*) - htmldir=$ac_optarg ;; +fi +fi +ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP +if test -n "$ac_ct_STRIP"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5 +$as_echo "$ac_ct_STRIP" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi - -includedir | --includedir | --includedi | --included | --include \ - | --includ | --inclu | --incl | --inc) - ac_prev=includedir ;; - -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \ - | --includ=* | --inclu=* | --incl=* | --inc=*) - includedir=$ac_optarg ;; + if test "x$ac_ct_STRIP" = x; then + STRIP=":" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + STRIP=$ac_ct_STRIP + fi +else + STRIP="$ac_cv_prog_STRIP" +fi - -infodir | --infodir | --infodi | --infod | --info | --inf) - ac_prev=infodir ;; - -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*) - infodir=$ac_optarg ;; +fi +INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s" - -libdir | --libdir | --libdi | --libd) - ac_prev=libdir ;; - -libdir=* | --libdir=* | --libdi=* | --libd=*) - libdir=$ac_optarg ;; +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a thread-safe mkdir -p" >&5 +$as_echo_n "checking for a thread-safe mkdir -p... 
" >&6; } +if test -z "$MKDIR_P"; then + if ${ac_cv_path_mkdir+:} false; then : + $as_echo_n "(cached) " >&6 +else + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH$PATH_SEPARATOR/opt/sfw/bin +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_prog in mkdir gmkdir; do + for ac_exec_ext in '' $ac_executable_extensions; do + as_fn_executable_p "$as_dir/$ac_prog$ac_exec_ext" || continue + case `"$as_dir/$ac_prog$ac_exec_ext" --version 2>&1` in #( + 'mkdir (GNU coreutils) '* | \ + 'mkdir (coreutils) '* | \ + 'mkdir (fileutils) '4.1*) + ac_cv_path_mkdir=$as_dir/$ac_prog$ac_exec_ext + break 3;; + esac + done + done + done +IFS=$as_save_IFS - -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \ - | --libexe | --libex | --libe) - ac_prev=libexecdir ;; - -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \ - | --libexe=* | --libex=* | --libe=*) - libexecdir=$ac_optarg ;; +fi - -localedir | --localedir | --localedi | --localed | --locale) - ac_prev=localedir ;; - -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*) - localedir=$ac_optarg ;; + test -d ./--version && rmdir ./--version + if test "${ac_cv_path_mkdir+set}" = set; then + MKDIR_P="$ac_cv_path_mkdir -p" + else + # As a last resort, use the slow shell script. Don't cache a + # value for MKDIR_P within a source directory, because that will + # break other packages using the cache if that directory is + # removed, or if the value is a relative name. + MKDIR_P="$ac_install_sh -d" + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $MKDIR_P" >&5 +$as_echo "$MKDIR_P" >&6; } + +mkdir_p="$MKDIR_P" +case $mkdir_p in + [\\/$]* | ?:[\\/]*) ;; + */*) mkdir_p="\$(top_builddir)/$mkdir_p" ;; +esac + +for ac_prog in gawk mawk nawk awk +do + # Extract the first word of "$ac_prog", so it can be a program name with args. 
+set dummy $ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_AWK+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$AWK"; then + ac_cv_prog_AWK="$AWK" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_AWK="$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS - -localstatedir | --localstatedir | --localstatedi | --localstated \ - | --localstate | --localstat | --localsta | --localst | --locals) - ac_prev=localstatedir ;; - -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \ - | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*) - localstatedir=$ac_optarg ;; +fi +fi +AWK=$ac_cv_prog_AWK +if test -n "$AWK"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AWK" >&5 +$as_echo "$AWK" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi - -mandir | --mandir | --mandi | --mand | --man | --ma | --m) - ac_prev=mandir ;; - -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*) - mandir=$ac_optarg ;; - -nfp | --nfp | --nf) - # Obsolete; use --without-fp. - with_fp=no ;; + test -n "$AWK" && break +done - -no-create | --no-create | --no-creat | --no-crea | --no-cre \ - | --no-cr | --no-c | -n) - no_create=yes ;; +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5 +$as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... 
" >&6; } +set x ${MAKE-make} +ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'` +if eval \${ac_cv_prog_make_${ac_make}_set+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat >conftest.make <<\_ACEOF +SHELL = /bin/sh +all: + @echo '@@@%%%=$(MAKE)=@@@%%%' +_ACEOF +# GNU make sometimes prints "make[1]: Entering ...", which would confuse us. +case `${MAKE-make} -f conftest.make 2>/dev/null` in + *@@@%%%=?*=@@@%%%*) + eval ac_cv_prog_make_${ac_make}_set=yes;; + *) + eval ac_cv_prog_make_${ac_make}_set=no;; +esac +rm -f conftest.make +fi +if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + SET_MAKE= +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + SET_MAKE="MAKE=${MAKE-make}" +fi - -no-recursion | --no-recursion | --no-recursio | --no-recursi \ - | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r) - no_recursion=yes ;; +rm -rf .tst 2>/dev/null +mkdir .tst 2>/dev/null +if test -d .tst; then + am__leading_dot=. +else + am__leading_dot=_ +fi +rmdir .tst 2>/dev/null - -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \ - | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \ - | --oldin | --oldi | --old | --ol | --o) - ac_prev=oldincludedir ;; - -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \ - | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \ - | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*) - oldincludedir=$ac_optarg ;; +if test "`cd $srcdir && pwd`" != "`pwd`"; then + # Use -I$(srcdir) only when $(srcdir) != ., so that make's output + # is not polluted with repeated "-I." + am__isrc=' -I$(srcdir)' + # test to see if srcdir already configured + if test -f $srcdir/config.status; then + as_fn_error $? 
"source directory already configured; run \"make distclean\" there first" "$LINENO" 5 + fi +fi - -prefix | --prefix | --prefi | --pref | --pre | --pr | --p) - ac_prev=prefix ;; - -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*) - prefix=$ac_optarg ;; +# test whether we have cygpath +if test -z "$CYGPATH_W"; then + if (cygpath --version) >/dev/null 2>/dev/null; then + CYGPATH_W='cygpath -w' + else + CYGPATH_W=echo + fi +fi - -program-prefix | --program-prefix | --program-prefi | --program-pref \ - | --program-pre | --program-pr | --program-p) - ac_prev=program_prefix ;; - -program-prefix=* | --program-prefix=* | --program-prefi=* \ - | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*) - program_prefix=$ac_optarg ;; - -program-suffix | --program-suffix | --program-suffi | --program-suff \ - | --program-suf | --program-su | --program-s) - ac_prev=program_suffix ;; - -program-suffix=* | --program-suffix=* | --program-suffi=* \ - | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*) - program_suffix=$ac_optarg ;; +# Define the identity of the package. 
+ PACKAGE='apache-couchdb' + VERSION='1.4.0' - -program-transform-name | --program-transform-name \ - | --program-transform-nam | --program-transform-na \ - | --program-transform-n | --program-transform- \ - | --program-transform | --program-transfor \ - | --program-transfo | --program-transf \ - | --program-trans | --program-tran \ - | --progr-tra | --program-tr | --program-t) - ac_prev=program_transform_name ;; - -program-transform-name=* | --program-transform-name=* \ - | --program-transform-nam=* | --program-transform-na=* \ - | --program-transform-n=* | --program-transform-=* \ - | --program-transform=* | --program-transfor=* \ - | --program-transfo=* | --program-transf=* \ - | --program-trans=* | --program-tran=* \ - | --progr-tra=* | --program-tr=* | --program-t=*) - program_transform_name=$ac_optarg ;; - -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd) - ac_prev=pdfdir ;; - -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*) - pdfdir=$ac_optarg ;; +cat >>confdefs.h <<_ACEOF +#define PACKAGE "$PACKAGE" +_ACEOF - -psdir | --psdir | --psdi | --psd | --ps) - ac_prev=psdir ;; - -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*) - psdir=$ac_optarg ;; - -q | -quiet | --quiet | --quie | --qui | --qu | --q \ - | -silent | --silent | --silen | --sile | --sil) - silent=yes ;; +cat >>confdefs.h <<_ACEOF +#define VERSION "$VERSION" +_ACEOF - -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) - ac_prev=sbindir ;; - -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ - | --sbi=* | --sb=*) - sbindir=$ac_optarg ;; +# Some tools Automake needs. 
- -sharedstatedir | --sharedstatedir | --sharedstatedi \ - | --sharedstated | --sharedstate | --sharedstat | --sharedsta \ - | --sharedst | --shareds | --shared | --share | --shar \ - | --sha | --sh) - ac_prev=sharedstatedir ;; - -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \ - | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \ - | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \ - | --sha=* | --sh=*) - sharedstatedir=$ac_optarg ;; +ACLOCAL=${ACLOCAL-"${am_missing_run}aclocal-${am__api_version}"} - -site | --site | --sit) - ac_prev=site ;; - -site=* | --site=* | --sit=*) - site=$ac_optarg ;; - -srcdir | --srcdir | --srcdi | --srcd | --src | --sr) - ac_prev=srcdir ;; - -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*) - srcdir=$ac_optarg ;; +AUTOCONF=${AUTOCONF-"${am_missing_run}autoconf"} - -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \ - | --syscon | --sysco | --sysc | --sys | --sy) - ac_prev=sysconfdir ;; - -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \ - | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*) - sysconfdir=$ac_optarg ;; - -target | --target | --targe | --targ | --tar | --ta | --t) - ac_prev=target_alias ;; - -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*) - target_alias=$ac_optarg ;; +AUTOMAKE=${AUTOMAKE-"${am_missing_run}automake-${am__api_version}"} - -v | -verbose | --verbose | --verbos | --verbo | --verb) - verbose=yes ;; - -version | --version | --versio | --versi | --vers | -V) - ac_init_version=: ;; +AUTOHEADER=${AUTOHEADER-"${am_missing_run}autoheader"} - -with-* | --with-*) - ac_package=`expr "x$ac_option" : 'x-*with-\([^=]*\)'` - # Reject names that are not valid shell variable names. 
- expr "x$ac_package" : ".*[^-._$as_cr_alnum]" >/dev/null && - { echo "$as_me: error: invalid package name: $ac_package" >&2 - { (exit 1); exit 1; }; } - ac_package=`echo $ac_package | sed 's/[-.]/_/g'` - eval with_$ac_package=\$ac_optarg ;; - -without-* | --without-*) - ac_package=`expr "x$ac_option" : 'x-*without-\(.*\)'` - # Reject names that are not valid shell variable names. - expr "x$ac_package" : ".*[^-._$as_cr_alnum]" >/dev/null && - { echo "$as_me: error: invalid package name: $ac_package" >&2 - { (exit 1); exit 1; }; } - ac_package=`echo $ac_package | sed 's/[-.]/_/g'` - eval with_$ac_package=no ;; +MAKEINFO=${MAKEINFO-"${am_missing_run}makeinfo"} - --x) - # Obsolete; use --with-x. - with_x=yes ;; +# We need awk for the "check" target. The system "awk" is bad on +# some platforms. +# Always define AMTAR for backward compatibility. Yes, it's still used +# in the wild :-( We should find a proper way to deprecate it ... +AMTAR='$${TAR-tar}' - -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \ - | --x-incl | --x-inc | --x-in | --x-i) - ac_prev=x_includes ;; - -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \ - | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*) - x_includes=$ac_optarg ;; +am__tar='$${TAR-tar} chof - "$$tardir"' am__untar='$${TAR-tar} xf -' - -x-libraries | --x-libraries | --x-librarie | --x-librari \ - | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l) - ac_prev=x_libraries ;; - -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \ - | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*) - x_libraries=$ac_optarg ;; - -*) { echo "$as_me: error: unrecognized option: $ac_option -Try \`$0 --help' for more information." >&2 - { (exit 1); exit 1; }; } - ;; - *=*) - ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='` - # Reject names that are not valid shell variable names. 
- expr "x$ac_envvar" : ".*[^_$as_cr_alnum]" >/dev/null && - { echo "$as_me: error: invalid variable name: $ac_envvar" >&2 - { (exit 1); exit 1; }; } - eval $ac_envvar=\$ac_optarg - export $ac_envvar ;; - *) - # FIXME: should be removed in autoconf 3.0. - echo "$as_me: WARNING: you should use --build, --host, --target" >&2 - expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null && - echo "$as_me: WARNING: invalid host type: $ac_option" >&2 - : ${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option} - ;; - esac -done -if test -n "$ac_prev"; then - ac_option=--`echo $ac_prev | sed 's/_/-/g'` - { echo "$as_me: error: missing argument to $ac_option" >&2 - { (exit 1); exit 1; }; } -fi +DEPDIR="${am__leading_dot}deps" -# Be sure to have absolute directory names. -for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \ - datadir sysconfdir sharedstatedir localstatedir includedir \ - oldincludedir docdir infodir htmldir dvidir pdfdir psdir \ - libdir localedir mandir -do - eval ac_val=\$$ac_var - case $ac_val in - [\\/$]* | ?:[\\/]* ) continue;; - NONE | '' ) case $ac_var in *prefix ) continue;; esac;; - esac - { echo "$as_me: error: expected an absolute directory name for --$ac_var: $ac_val" >&2 - { (exit 1); exit 1; }; } -done +ac_config_commands="$ac_config_commands depfiles" -# There might be people who depend on the old broken behavior: `$host' -# used to hold the argument of --host etc. -# FIXME: To remove some day. -build=$build_alias -host=$host_alias -target=$target_alias -# FIXME: To remove some day. -if test "x$host_alias" != x; then - if test "x$build_alias" = x; then - cross_compiling=maybe - echo "$as_me: WARNING: If you wanted to set the --build type, don't use --host. - If a cross compiler is detected then cross compile mode will be used." 
>&2 - elif test "x$build_alias" != "x$host_alias"; then - cross_compiling=yes - fi +am_make=${MAKE-make} +cat > confinc << 'END' +am__doit: + @echo this is the am__doit target +.PHONY: am__doit +END +# If we don't find an include directive, just comment out the code. +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for style of include used by $am_make" >&5 +$as_echo_n "checking for style of include used by $am_make... " >&6; } +am__include="#" +am__quote= +_am_result=none +# First try GNU make style include. +echo "include confinc" > confmf +# Ignore all kinds of additional output from `make'. +case `$am_make -s -f confmf 2> /dev/null` in #( +*the\ am__doit\ target*) + am__include=include + am__quote= + _am_result=GNU + ;; +esac +# Now try BSD make style include. +if test "$am__include" = "#"; then + echo '.include "confinc"' > confmf + case `$am_make -s -f confmf 2> /dev/null` in #( + *the\ am__doit\ target*) + am__include=.include + am__quote="\"" + _am_result=BSD + ;; + esac fi -ac_tool_prefix= -test -n "$host_alias" && ac_tool_prefix=$host_alias- -test "$silent" = yes && exec 6>/dev/null +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $_am_result" >&5 +$as_echo "$_am_result" >&6; } +rm -f confinc confmf +# Check whether --enable-dependency-tracking was given. 
+if test "${enable_dependency_tracking+set}" = set; then : + enableval=$enable_dependency_tracking; +fi -ac_pwd=`pwd` && test -n "$ac_pwd" && -ac_ls_di=`ls -di .` && -ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` || - { echo "$as_me: error: Working directory cannot be determined" >&2 - { (exit 1); exit 1; }; } -test "X$ac_ls_di" = "X$ac_pwd_ls_di" || - { echo "$as_me: error: pwd does not report name of working directory" >&2 - { (exit 1); exit 1; }; } +if test "x$enable_dependency_tracking" != xno; then + am_depcomp="$ac_aux_dir/depcomp" + AMDEPBACKSLASH='\' + am__nodep='_no' +fi + if test "x$enable_dependency_tracking" != xno; then + AMDEP_TRUE= + AMDEP_FALSE='#' +else + AMDEP_TRUE='#' + AMDEP_FALSE= +fi -# Find the source files, if location was not specified. -if test -z "$srcdir"; then - ac_srcdir_defaulted=yes - # Try the directory containing this script, then the parent directory. - ac_confdir=`$as_dirname -- "$0" || -$as_expr X"$0" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$0" : 'X\(//\)[^/]' \| \ - X"$0" : 'X\(//\)$' \| \ - X"$0" : 'X\(/\)' \| . 2>/dev/null || -echo X"$0" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - srcdir=$ac_confdir - if test ! -r "$srcdir/$ac_unique_file"; then - srcdir=.. - fi +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu +if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args. +set dummy ${ac_tool_prefix}gcc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... 
" >&6; } +if ${ac_cv_prog_CC+:} false; then : + $as_echo_n "(cached) " >&6 else - ac_srcdir_defaulted=no + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_CC="${ac_tool_prefix}gcc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + fi -if test ! -r "$srcdir/$ac_unique_file"; then - test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .." - { echo "$as_me: error: cannot find sources ($ac_unique_file) in $srcdir" >&2 - { (exit 1); exit 1; }; } fi -ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work" -ac_abs_confdir=`( - cd "$srcdir" && test -r "./$ac_unique_file" || { echo "$as_me: error: $ac_msg" >&2 - { (exit 1); exit 1; }; } - pwd)` -# When building in place, set srcdir=. -if test "$ac_abs_confdir" = "$ac_pwd"; then - srcdir=. +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi -# Remove unnecessary trailing slashes from srcdir. -# Double slashes in file names in object file debugging info -# mess up M-x gdb in Emacs. -case $srcdir in -*/) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;; -esac -for ac_var in $ac_precious_vars; do - eval ac_env_${ac_var}_set=\${${ac_var}+set} - eval ac_env_${ac_var}_value=\$${ac_var} - eval ac_cv_env_${ac_var}_set=\${${ac_var}+set} - eval ac_cv_env_${ac_var}_value=\$${ac_var} -done -# -# Report the --help message. -# -if test "$ac_init_help" = "long"; then - # Omit some internal or obsolete options to make the list less imposing. 
- # This message is too long to be a string in the A/UX 3.1 sh. - cat <<_ACEOF -\`configure' configures Apache CouchDB 1.2.0 to adapt to many kinds of systems. -Usage: $0 [OPTION]... [VAR=VALUE]... +fi +if test -z "$ac_cv_prog_CC"; then + ac_ct_CC=$CC + # Extract the first word of "gcc", so it can be a program name with args. +set dummy gcc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_CC"; then + ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_CC="gcc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS -To assign environment variables (e.g., CC, CFLAGS...), specify them as -VAR=VALUE. See below for descriptions of some of the useful variables. +fi +fi +ac_ct_CC=$ac_cv_prog_ac_ct_CC +if test -n "$ac_ct_CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 +$as_echo "$ac_ct_CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi -Defaults for the options are specified in brackets. 
+ if test "x$ac_ct_CC" = x; then + CC="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + CC=$ac_ct_CC + fi +else + CC="$ac_cv_prog_CC" +fi -Configuration: - -h, --help display this help and exit - --help=short display options specific to this package - --help=recursive display the short help of all the included packages - -V, --version display version information and exit - -q, --quiet, --silent do not print \`checking...' messages - --cache-file=FILE cache test results in FILE [disabled] - -C, --config-cache alias for \`--cache-file=config.cache' - -n, --no-create do not create output files - --srcdir=DIR find the sources in DIR [configure dir or \`..'] +if test -z "$CC"; then + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args. +set dummy ${ac_tool_prefix}cc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_CC="${ac_tool_prefix}cc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS -Installation directories: - --prefix=PREFIX install architecture-independent files in PREFIX - [$ac_default_prefix] - --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX - [PREFIX] +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi -By default, \`make install' will install all the files in -\`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify -an installation prefix other than \`$ac_default_prefix' using \`--prefix', -for instance \`--prefix=\$HOME'. -For better control, use the options below. + fi +fi +if test -z "$CC"; then + # Extract the first word of "cc", so it can be a program name with args. +set dummy cc; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else + ac_prog_rejected=no +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then + ac_prog_rejected=yes + continue + fi + ac_cv_prog_CC="cc" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS -Fine tuning of the installation directories: - --bindir=DIR user executables [EPREFIX/bin] - --sbindir=DIR system admin executables [EPREFIX/sbin] - --libexecdir=DIR program executables [EPREFIX/libexec] - --sysconfdir=DIR read-only single-machine data [PREFIX/etc] - --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] - --localstatedir=DIR modifiable single-machine data [PREFIX/var] - --libdir=DIR object code libraries [EPREFIX/lib] - --includedir=DIR C header files [PREFIX/include] - --oldincludedir=DIR C header files for non-gcc [/usr/include] - --datarootdir=DIR read-only arch.-independent data root [PREFIX/share] - --datadir=DIR read-only architecture-independent data [DATAROOTDIR] - --infodir=DIR info documentation [DATAROOTDIR/info] - --localedir=DIR locale-dependent data [DATAROOTDIR/locale] - --mandir=DIR man documentation [DATAROOTDIR/man] - --docdir=DIR documentation root [DATAROOTDIR/doc/apache-couchdb] - --htmldir=DIR html documentation [DOCDIR] - --dvidir=DIR dvi documentation [DOCDIR] - --pdfdir=DIR pdf documentation [DOCDIR] - --psdir=DIR ps documentation [DOCDIR] -_ACEOF +if test $ac_prog_rejected = yes; then + # We found a bogon in the path, so make sure we never use it. + set dummy $ac_cv_prog_CC + shift + if test $# != 0; then + # We chose a different compiler from the bogus one. + # However, it has the same basename, so the bogon will be chosen + # first if we set CC to just the basename; use the full file name. 
+ shift + ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@" + fi +fi +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi - cat <<\_ACEOF -Program names: - --program-prefix=PREFIX prepend PREFIX to installed program names - --program-suffix=SUFFIX append SUFFIX to installed program names - --program-transform-name=PROGRAM run sed PROGRAM on installed program names +fi +if test -z "$CC"; then + if test -n "$ac_tool_prefix"; then + for ac_prog in cl.exe + do + # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. +set dummy $ac_tool_prefix$ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$CC"; then + ac_cv_prog_CC="$CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_CC="$ac_tool_prefix$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS -System types: - --build=BUILD configure for building on BUILD [guessed] - --host=HOST cross-compile to build programs to run on HOST [BUILD] -_ACEOF +fi +fi +CC=$ac_cv_prog_CC +if test -n "$CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi -if test -n "$ac_init_help"; then - case $ac_init_help in - short | recursive ) echo "Configuration of Apache CouchDB 1.2.0:";; - esac - cat <<\_ACEOF - -Optional Features: - --disable-FEATURE do not include FEATURE (same as --enable-FEATURE=no) - --enable-FEATURE[=ARG] include FEATURE [ARG=yes] - --enable-shared[=PKGS] build shared libraries [default=yes] - --enable-static[=PKGS] build static libraries [default=no] - --disable-dependency-tracking speeds up one-time build - --enable-dependency-tracking do not reject slow dependency extractors - --enable-fast-install[=PKGS] - optimize for fast installation [default=yes] - --disable-libtool-lock avoid locking (might break parallel builds) - --enable-js-trunk allow use of SpiderMonkey versions newer than - js185-1.0.0 - --disable-init don't install init script where applicable - --disable-launchd don't install launchd configuration where applicable - --enable-native-mochijson - compile mochijson to native code (EXPERIMENTAL) - -Optional Packages: - --with-PACKAGE[=ARG] use PACKAGE [ARG=yes] - --without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no) - --with-pic try to use only PIC/non-PIC objects [default=use - both] - --with-gnu-ld assume the C compiler uses GNU ld [default=no] - --with-erlang=PATH set PATH to the Erlang include directory - --with-js-lib=PATH set 
PATH to the SpiderMonkey library directory - --with-js-include=PATH set PATH to the SpiderMonkey include directory - --with-js-lib-name=NAME set Spidermonkey library NAME - --with-openssl-bin-dir=PATH - path to the open ssl binaries for distribution on - Windows - --with-msvc-redist-dir=PATH - path to the msvc redistributables for the Windows - platform - --with-win32-icu-binaries=PATH - set PATH to the Win32 native ICU binaries directory - --with-win32-curl=PATH set PATH to the Win32 native curl directory - -Some influential environment variables: - CC C compiler command - CFLAGS C compiler flags - LDFLAGS linker flags, e.g. -L if you have libraries in a - nonstandard directory - LIBS libraries to pass to the linker, e.g. -l - CPPFLAGS C/C++/Objective C preprocessor flags, e.g. -I if - you have headers in a nonstandard directory - CPP C preprocessor - PKG_CONFIG path to pkg-config utility - PKG_CONFIG_PATH - directories to add to pkg-config's search path - PKG_CONFIG_LIBDIR - path overriding pkg-config's built-in search path - CXX C++ compiler command - CXXFLAGS C++ compiler flags - CXXCPP C++ preprocessor - ERLC_FLAGS general flags to prepend to ERLC_FLAGS - FLAGS general flags to prepend to LDFLAGS and CPPFLAGS - JS185_CFLAGS - C compiler flags for JS185, overriding pkg-config - JS185_LIBS linker flags for JS185, overriding pkg-config - JS_CFLAGS C compiler flags for JS, overriding pkg-config - JS_LIBS linker flags for JS, overriding pkg-config - ERL path to the `erl' executable - ERLC path to the `erlc' executable - HELP2MAN_EXECUTABLE - path to the `help2man' program -Use these variables to override the choices made by `configure' or to help -it to find libraries and programs with nonstandard names/locations. + test -n "$CC" && break + done +fi +if test -z "$CC"; then + ac_ct_CC=$CC + for ac_prog in cl.exe +do + # Extract the first word of "$ac_prog", so it can be a program name with args. 
+set dummy $ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_CC+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_CC"; then + ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_CC="$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS -Report bugs to . -_ACEOF -ac_status=$? +fi +fi +ac_ct_CC=$ac_cv_prog_ac_ct_CC +if test -n "$ac_ct_CC"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 +$as_echo "$ac_ct_CC" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi -if test "$ac_init_help" = "recursive"; then - # If there are subdirs, report their specific --help. - for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue - test -d "$ac_dir" || continue - ac_builddir=. -case "$ac_dir" in -.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; -*) - ac_dir_suffix=/`echo "$ac_dir" | sed 's,^\.[\\/],,'` - # A ".." for each directory in $ac_dir_suffix. - ac_top_builddir_sub=`echo "$ac_dir_suffix" | sed 's,/[^\\/]*,/..,g;s,/,,'` - case $ac_top_builddir_sub in - "") ac_top_builddir_sub=. ac_top_build_prefix= ;; - *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; - esac ;; -esac -ac_abs_top_builddir=$ac_pwd -ac_abs_builddir=$ac_pwd$ac_dir_suffix -# for backward compatibility: -ac_top_builddir=$ac_top_build_prefix + test -n "$ac_ct_CC" && break +done -case $srcdir in - .) # We are building in place. - ac_srcdir=. - ac_top_srcdir=$ac_top_builddir_sub - ac_abs_top_srcdir=$ac_pwd ;; - [\\/]* | ?:[\\/]* ) # Absolute name. 
- ac_srcdir=$srcdir$ac_dir_suffix; - ac_top_srcdir=$srcdir - ac_abs_top_srcdir=$srcdir ;; - *) # Relative name. - ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix - ac_top_srcdir=$ac_top_build_prefix$srcdir - ac_abs_top_srcdir=$ac_pwd/$srcdir ;; + if test "x$ac_ct_CC" = x; then + CC="" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; esac -ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix + CC=$ac_ct_CC + fi +fi - cd "$ac_dir" || { ac_status=$?; continue; } - # Check for guested configure. - if test -f "$ac_srcdir/configure.gnu"; then - echo && - $SHELL "$ac_srcdir/configure.gnu" --help=recursive - elif test -f "$ac_srcdir/configure"; then - echo && - $SHELL "$ac_srcdir/configure" --help=recursive - else - echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2 - fi || ac_status=$? - cd "$ac_pwd" || { ac_status=$?; break; } - done fi -test -n "$ac_init_help" && exit $ac_status -if $ac_init_version; then - cat <<\_ACEOF -Apache CouchDB configure 1.2.0 -generated by GNU Autoconf 2.61 -Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, -2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc. -This configure script is free software; the Free Software Foundation -gives unlimited permission to copy, distribute and modify it. -_ACEOF - exit -fi -cat >config.log <<_ACEOF -This file contains any messages produced by compilers while -running configure, to aid debugging if configure makes a mistake. +test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? 
"no acceptable C compiler found in \$PATH +See \`config.log' for more details" "$LINENO" 5; } -It was created by Apache CouchDB $as_me 1.2.0, which was -generated by GNU Autoconf 2.61. Invocation command line was +# Provide some information about the compiler. +$as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5 +set X $ac_compile +ac_compiler=$2 +for ac_option in --version -v -V -qversion; do + { { ac_try="$ac_compiler $ac_option >&5" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_compiler $ac_option >&5") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + sed '10a\ +... rest of stderr output deleted ... + 10q' conftest.err >conftest.er1 + cat conftest.er1 >&5 + fi + rm -f conftest.er1 conftest.err + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } +done - $ $0 $@ +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ -_ACEOF -exec 5>>config.log +int +main () { -cat <<_ASUNAME -## --------- ## -## Platform. ## -## --------- ## - -hostname = `(hostname || uname -n) 2>/dev/null | sed 1q` -uname -m = `(uname -m) 2>/dev/null || echo unknown` -uname -r = `(uname -r) 2>/dev/null || echo unknown` -uname -s = `(uname -s) 2>/dev/null || echo unknown` -uname -v = `(uname -v) 2>/dev/null || echo unknown` -/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown` -/bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown` + ; + return 0; +} +_ACEOF +ac_clean_files_save=$ac_clean_files +ac_clean_files="$ac_clean_files a.out a.out.dSYM a.exe b.out" +# Try to create an executable without -o first, disregard a.out. +# It will help us diagnose broken compilers, and finding out an intuition +# of exeext. 
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler works" >&5 +$as_echo_n "checking whether the C compiler works... " >&6; } +ac_link_default=`$as_echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'` -/bin/arch = `(/bin/arch) 2>/dev/null || echo unknown` -/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown` -/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown` -/usr/bin/hostinfo = `(/usr/bin/hostinfo) 2>/dev/null || echo unknown` -/bin/machine = `(/bin/machine) 2>/dev/null || echo unknown` -/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown` -/bin/universe = `(/bin/universe) 2>/dev/null || echo unknown` +# The possible output files: +ac_files="a.out conftest.exe conftest a.exe a_out.exe b.out conftest.*" -_ASUNAME +ac_rmfiles= +for ac_file in $ac_files +do + case $ac_file in + *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; + * ) ac_rmfiles="$ac_rmfiles $ac_file";; + esac +done +rm -f $ac_rmfiles -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH +if { { ac_try="$ac_link_default" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link_default") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then : + # Autoconf-2.13 could set the ac_cv_exeext variable to `no'. +# So ignore a value of `no', otherwise this would lead to `EXEEXT = no' +# in a Makefile. We should not override ac_cv_exeext if it was cached, +# so that the user can short-circuit this test for compilers unknown to +# Autoconf. +for ac_file in $ac_files '' do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- echo "PATH: $as_dir" + test -f "$ac_file" || continue + case $ac_file in + *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) + ;; + [ab].out ) + # We found the default executable, but exeext='' is most + # certainly right. + break;; + *.* ) + if test "${ac_cv_exeext+set}" = set && test "$ac_cv_exeext" != no; + then :; else + ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` + fi + # We set ac_cv_exeext here because the later test for it is not + # safe: cross compilers may not add the suffix if given an `-o' + # argument, so we may need to know it at that point already. + # Even if this section looks crufty: it has the advantage of + # actually working. + break;; + * ) + break;; + esac done -IFS=$as_save_IFS - -} >&5 - -cat >&5 <<_ACEOF - - -## ----------- ## -## Core tests. ## -## ----------- ## +test "$ac_cv_exeext" = no && ac_cv_exeext= -_ACEOF +else + ac_file='' +fi +if test -z "$ac_file"; then : + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +$as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 +{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error 77 "C compiler cannot create executables +See \`config.log' for more details" "$LINENO" 5; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler default output file name" >&5 +$as_echo_n "checking for C compiler default output file name... " >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_file" >&5 +$as_echo "$ac_file" >&6; } +ac_exeext=$ac_cv_exeext -# Keep a trace of the command line. -# Strip out --no-create and --no-recursion so they do not pile up. -# Strip out --silent because we don't want to record it for future runs. -# Also quote any args containing shell meta-characters. 
-# Make two passes to allow for proper duplicate-argument suppression. -ac_configure_args= -ac_configure_args0= -ac_configure_args1= -ac_must_keep_next=false -for ac_pass in 1 2 -do - for ac_arg - do - case $ac_arg in - -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;; - -q | -quiet | --quiet | --quie | --qui | --qu | --q \ - | -silent | --silent | --silen | --sile | --sil) - continue ;; - *\'*) - ac_arg=`echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; - esac - case $ac_pass in - 1) ac_configure_args0="$ac_configure_args0 '$ac_arg'" ;; - 2) - ac_configure_args1="$ac_configure_args1 '$ac_arg'" - if test $ac_must_keep_next = true; then - ac_must_keep_next=false # Got value, back to normal. - else - case $ac_arg in - *=* | --config-cache | -C | -disable-* | --disable-* \ - | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \ - | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \ - | -with-* | --with-* | -without-* | --without-* | --x) - case "$ac_configure_args0 " in - "$ac_configure_args1"*" '$ac_arg' "* ) continue ;; - esac - ;; - -* ) ac_must_keep_next=true ;; - esac - fi - ac_configure_args="$ac_configure_args '$ac_arg'" - ;; - esac - done +rm -f -r a.out a.out.dSYM a.exe conftest$ac_cv_exeext b.out +ac_clean_files=$ac_clean_files_save +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of executables" >&5 +$as_echo_n "checking for suffix of executables... " >&6; } +if { { ac_try="$ac_link" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then : + # If both `conftest.exe' and `conftest' are `present' (well, observable) +# catch `conftest.exe'. 
For instance with Cygwin, `ls conftest' will +# work properly (i.e., refer to `conftest.exe'), while it won't with +# `rm'. +for ac_file in conftest.exe conftest conftest.*; do + test -f "$ac_file" || continue + case $ac_file in + *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; + *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` + break;; + * ) break;; + esac done -$as_unset ac_configure_args0 || test "${ac_configure_args0+set}" != set || { ac_configure_args0=; export ac_configure_args0; } -$as_unset ac_configure_args1 || test "${ac_configure_args1+set}" != set || { ac_configure_args1=; export ac_configure_args1; } +else + { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "cannot compute suffix of executables: cannot compile and link +See \`config.log' for more details" "$LINENO" 5; } +fi +rm -f conftest conftest$ac_cv_exeext +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5 +$as_echo "$ac_cv_exeext" >&6; } -# When interrupted or exit'd, cleanup temporary files, and complete -# config.log. We remove comments because anyway the quotes in there -# would cause problems or look ugly. -# WARNING: Use '\'' to represent an apostrophe within the trap. -# WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug. -trap 'exit_status=$? - # Save into config.log some information that might help in debugging. - { - echo +rm -f conftest.$ac_ext +EXEEXT=$ac_cv_exeext +ac_exeext=$EXEEXT +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +int +main () +{ +FILE *f = fopen ("conftest.out", "w"); + return ferror (f) || fclose (f) != 0; - cat <<\_ASBOX -## ---------------- ## -## Cache variables. 
## -## ---------------- ## -_ASBOX - echo - # The following way of writing the cache mishandles newlines in values, -( - for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do - eval ac_val=\$$ac_var - case $ac_val in #( - *${as_nl}*) - case $ac_var in #( - *_cv_*) { echo "$as_me:$LINENO: WARNING: Cache variable $ac_var contains a newline." >&5 -echo "$as_me: WARNING: Cache variable $ac_var contains a newline." >&2;} ;; - esac - case $ac_var in #( - _ | IFS | as_nl) ;; #( - *) $as_unset $ac_var ;; - esac ;; - esac - done - (set) 2>&1 | - case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #( - *${as_nl}ac_space=\ *) - sed -n \ - "s/'\''/'\''\\\\'\'''\''/g; - s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p" - ;; #( - *) - sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" - ;; - esac | - sort -) - echo + ; + return 0; +} +_ACEOF +ac_clean_files="$ac_clean_files conftest.out" +# Check that the compiler produces executables we can run. If not, either +# the compiler is broken, or we cross compile. +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5 +$as_echo_n "checking whether we are cross compiling... " >&6; } +if test "$cross_compiling" != yes; then + { { ac_try="$ac_link" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_link") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } + if { ac_try='./conftest$ac_cv_exeext' + { { case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_try") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 + test $ac_status = 0; }; }; then + cross_compiling=no + else + if test "$cross_compiling" = maybe; then + cross_compiling=yes + else + { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "cannot run C compiled programs. +If you meant to cross compile, use \`--host'. +See \`config.log' for more details" "$LINENO" 5; } + fi + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5 +$as_echo "$cross_compiling" >&6; } - cat <<\_ASBOX -## ----------------- ## -## Output variables. ## -## ----------------- ## -_ASBOX - echo - for ac_var in $ac_subst_vars - do - eval ac_val=\$$ac_var - case $ac_val in - *\'\''*) ac_val=`echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; - esac - echo "$ac_var='\''$ac_val'\''" - done | sort - echo +rm -f conftest.$ac_ext conftest$ac_cv_exeext conftest.out +ac_clean_files=$ac_clean_files_save +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5 +$as_echo_n "checking for suffix of object files... " >&6; } +if ${ac_cv_objext+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ - if test -n "$ac_subst_files"; then - cat <<\_ASBOX -## ------------------- ## -## File substitutions. ## -## ------------------- ## -_ASBOX - echo - for ac_var in $ac_subst_files - do - eval ac_val=\$$ac_var - case $ac_val in - *\'\''*) ac_val=`echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; - esac - echo "$ac_var='\''$ac_val'\''" - done | sort - echo - fi +int +main () +{ - if test -s confdefs.h; then - cat <<\_ASBOX -## ----------- ## -## confdefs.h. 
## -## ----------- ## -_ASBOX - echo - cat confdefs.h - echo - fi - test "$ac_signal" != 0 && - echo "$as_me: caught signal $ac_signal" - echo "$as_me: exit $exit_status" - } >&5 - rm -f core *.core core.conftest.* && - rm -f -r conftest* confdefs* conf$$* $ac_clean_files && - exit $exit_status -' 0 -for ac_signal in 1 2 13 15; do - trap 'ac_signal='$ac_signal'; { (exit 1); exit 1; }' $ac_signal + ; + return 0; +} +_ACEOF +rm -f conftest.o conftest.obj +if { { ac_try="$ac_compile" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_compile") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then : + for ac_file in conftest.o conftest.obj conftest.*; do + test -f "$ac_file" || continue; + case $ac_file in + *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM ) ;; + *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'` + break;; + esac done -ac_signal=0 +else + $as_echo "$as_me: failed program was:" >&5 +sed 's/^/| /' conftest.$ac_ext >&5 -# confdefs.h avoids OS command line length limits that DEFS can exceed. -rm -f -r conftest* confdefs.h +{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "cannot compute suffix of object files: cannot compile +See \`config.log' for more details" "$LINENO" 5; } +fi +rm -f conftest.$ac_cv_objext conftest.$ac_ext +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5 +$as_echo "$ac_cv_objext" >&6; } +OBJEXT=$ac_cv_objext +ac_objext=$OBJEXT +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5 +$as_echo_n "checking whether we are using the GNU C compiler... 
" >&6; } +if ${ac_cv_c_compiler_gnu+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ -# Predefined preprocessor variables. +int +main () +{ +#ifndef __GNUC__ + choke me +#endif -cat >>confdefs.h <<_ACEOF -#define PACKAGE_NAME "$PACKAGE_NAME" + ; + return 0; +} _ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_compiler_gnu=yes +else + ac_compiler_gnu=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +ac_cv_c_compiler_gnu=$ac_compiler_gnu +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5 +$as_echo "$ac_cv_c_compiler_gnu" >&6; } +if test $ac_compiler_gnu = yes; then + GCC=yes +else + GCC= +fi +ac_test_CFLAGS=${CFLAGS+set} +ac_save_CFLAGS=$CFLAGS +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5 +$as_echo_n "checking whether $CC accepts -g... " >&6; } +if ${ac_cv_prog_cc_g+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_save_c_werror_flag=$ac_c_werror_flag + ac_c_werror_flag=yes + ac_cv_prog_cc_g=no + CFLAGS="-g" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ -cat >>confdefs.h <<_ACEOF -#define PACKAGE_TARNAME "$PACKAGE_TARNAME" -_ACEOF - +int +main () +{ -cat >>confdefs.h <<_ACEOF -#define PACKAGE_VERSION "$PACKAGE_VERSION" + ; + return 0; +} _ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_prog_cc_g=yes +else + CFLAGS="" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +int +main () +{ -cat >>confdefs.h <<_ACEOF -#define PACKAGE_STRING "$PACKAGE_STRING" + ; + return 0; +} _ACEOF +if ac_fn_c_try_compile "$LINENO"; then : +else + ac_c_werror_flag=$ac_save_c_werror_flag + CFLAGS="-g" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ -cat >>confdefs.h <<_ACEOF -#define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT" -_ACEOF - +int +main () +{ -# Let the site file select an alternate cache file if it wants to. 
-# Prefer explicitly selected file to automatically selected ones. -if test -n "$CONFIG_SITE"; then - set x "$CONFIG_SITE" -elif test "x$prefix" != xNONE; then - set x "$prefix/share/config.site" "$prefix/etc/config.site" -else - set x "$ac_default_prefix/share/config.site" \ - "$ac_default_prefix/etc/config.site" + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_prog_cc_g=yes fi -shift -for ac_site_file -do - if test -r "$ac_site_file"; then - { echo "$as_me:$LINENO: loading site script $ac_site_file" >&5 -echo "$as_me: loading site script $ac_site_file" >&6;} - sed 's/^/| /' "$ac_site_file" >&5 - . "$ac_site_file" - fi -done - -if test -r "$cache_file"; then - # Some versions of bash will fail to source /dev/null (special - # files actually), so we avoid doing that. - if test -f "$cache_file"; then - { echo "$as_me:$LINENO: loading cache $cache_file" >&5 -echo "$as_me: loading cache $cache_file" >&6;} - case $cache_file in - [\\/]* | ?:[\\/]* ) . "$cache_file";; - *) . "./$cache_file";; - esac +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + ac_c_werror_flag=$ac_save_c_werror_flag +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5 +$as_echo "$ac_cv_prog_cc_g" >&6; } +if test "$ac_test_CFLAGS" = set; then + CFLAGS=$ac_save_CFLAGS +elif test $ac_cv_prog_cc_g = yes; then + if test "$GCC" = yes; then + CFLAGS="-g -O2" + else + CFLAGS="-g" fi else - { echo "$as_me:$LINENO: creating cache $cache_file" >&5 -echo "$as_me: creating cache $cache_file" >&6;} - >$cache_file -fi - -# Check that the precious variables saved in the cache have kept the same -# value. 
-ac_cache_corrupted=false -for ac_var in $ac_precious_vars; do - eval ac_old_set=\$ac_cv_env_${ac_var}_set - eval ac_new_set=\$ac_env_${ac_var}_set - eval ac_old_val=\$ac_cv_env_${ac_var}_value - eval ac_new_val=\$ac_env_${ac_var}_value - case $ac_old_set,$ac_new_set in - set,) - { echo "$as_me:$LINENO: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5 -echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;} - ac_cache_corrupted=: ;; - ,set) - { echo "$as_me:$LINENO: error: \`$ac_var' was not set in the previous run" >&5 -echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;} - ac_cache_corrupted=: ;; - ,);; - *) - if test "x$ac_old_val" != "x$ac_new_val"; then - { echo "$as_me:$LINENO: error: \`$ac_var' has changed since the previous run:" >&5 -echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;} - { echo "$as_me:$LINENO: former value: $ac_old_val" >&5 -echo "$as_me: former value: $ac_old_val" >&2;} - { echo "$as_me:$LINENO: current value: $ac_new_val" >&5 -echo "$as_me: current value: $ac_new_val" >&2;} - ac_cache_corrupted=: - fi;; - esac - # Pass precious variables to config.status. - if test "$ac_new_set" = set; then - case $ac_new_val in - *\'*) ac_arg=$ac_var=`echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;; - *) ac_arg=$ac_var=$ac_new_val ;; - esac - case " $ac_configure_args " in - *" '$ac_arg' "*) ;; # Avoid dups. Use of quotes ensures accuracy. 
- *) ac_configure_args="$ac_configure_args '$ac_arg'" ;; - esac + if test "$GCC" = yes; then + CFLAGS="-O2" + else + CFLAGS= fi -done -if $ac_cache_corrupted; then - { echo "$as_me:$LINENO: error: changes in the environment can compromise the build" >&5 -echo "$as_me: error: changes in the environment can compromise the build" >&2;} - { { echo "$as_me:$LINENO: error: run \`make distclean' and/or \`rm $cache_file' and start over" >&5 -echo "$as_me: error: run \`make distclean' and/or \`rm $cache_file' and start over" >&2;} - { (exit 1); exit 1; }; } fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5 +$as_echo_n "checking for $CC option to accept ISO C89... " >&6; } +if ${ac_cv_prog_cc_c89+:} false; then : + $as_echo_n "(cached) " >&6 +else + ac_cv_prog_cc_c89=no +ac_save_CC=$CC +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +#include +struct stat; +/* Most of the following tests are stolen from RCS 5.7's src/conf.sh. */ +struct buf { int x; }; +FILE * (*rcsopen) (struct buf *, struct stat *, int); +static char *e (p, i) + char **p; + int i; +{ + return p[i]; +} +static char *f (char * (*g) (char **, int), char **p, ...) +{ + char *s; + va_list v; + va_start (v,p); + s = g (p, va_arg (v,int)); + va_end (v); + return s; +} +/* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has + function prototypes and stuff, but not '\xHH' hex character constants. + These don't provoke an error unfortunately, instead are silently treated + as 'x'. The following induces an error, until -std is added to get + proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an + array size at least. It's necessary to write '\x00'==0 to get something + that's true only with -std. */ +int osf4_cc_array ['\x00' == 0 ? 1 : -1]; +/* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters + inside strings and character constants. */ +#define FOO(x) 'x' +int xlc6_cc_array[FOO(a) == 'x' ? 
1 : -1]; +int test (int i, double x); +struct s1 {int (*f) (int a);}; +struct s2 {int (*f) (double a);}; +int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int); +int argc; +char **argv; +int +main () +{ +return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1]; + ; + return 0; +} +_ACEOF +for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \ + -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__" +do + CC="$ac_save_CC $ac_arg" + if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_prog_cc_c89=$ac_arg +fi +rm -f core conftest.err conftest.$ac_objext + test "x$ac_cv_prog_cc_c89" != "xno" && break +done +rm -f conftest.$ac_ext +CC=$ac_save_CC +fi +# AC_CACHE_VAL +case "x$ac_cv_prog_cc_c89" in + x) + { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 +$as_echo "none needed" >&6; } ;; + xno) + { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 +$as_echo "unsupported" >&6; } ;; + *) + CC="$CC $ac_cv_prog_cc_c89" + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5 +$as_echo "$ac_cv_prog_cc_c89" >&6; } ;; +esac +if test "x$ac_cv_prog_cc_c89" != xno; then : +fi +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu +depcc="$CC" am_compiler_list= +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5 +$as_echo_n "checking dependency style of $depcc... " >&6; } +if ${am_cv_CC_dependencies_compiler_type+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then + # We make a subdir and do the tests there. Otherwise we can end up + # making bogus files that we don't know about and never remove. For + # instance it was reported that on HP-UX the gcc test will end up + # making a dummy file named `D' -- because `-MD' means `put the output + # in D'. 
+ rm -rf conftest.dir + mkdir conftest.dir + # Copy depcomp to subdir because otherwise we won't find it if we're + # using a relative directory. + cp "$am_depcomp" conftest.dir + cd conftest.dir + # We will build objects and dependencies in a subdirectory because + # it helps to detect inapplicable dependency modes. For instance + # both Tru64's cc and ICC support -MD to output dependencies as a + # side effect of compilation, but ICC will put the dependencies in + # the current directory while Tru64 will put them in the object + # directory. + mkdir sub + am_cv_CC_dependencies_compiler_type=none + if test "$am_compiler_list" = ""; then + am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp` + fi + am__universal=false + case " $depcc " in #( + *\ -arch\ *\ -arch\ *) am__universal=true ;; + esac + for depmode in $am_compiler_list; do + # Setup a source with many dependencies, because some compilers + # like to wrap large dependency lists on column 80 (with \), and + # we should not choose a depcomp mode which is confused by this. + # + # We need to recreate these files for each test, as the compiler may + # overwrite some of them when testing with obscure command lines. + # This happens at least with the AIX C compiler. + : > sub/conftest.c + for i in 1 2 3 4 5 6; do + echo '#include "conftst'$i'.h"' >> sub/conftest.c + # Using `: > sub/conftst$i.h' creates only sub/conftst1.h with + # Solaris 8's {/usr,}/bin/sh. + touch sub/conftst$i.h + done + echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf + # We check with `-c' and `-o' for the sake of the "dashmstdout" + # mode. It turns out that the SunPro C++ compiler does not properly + # handle `-M -o', and we need to detect this. Also, some Intel + # versions had trouble with output in subdirs + am__obj=sub/conftest.${OBJEXT-o} + am__minus_obj="-o $am__obj" + case $depmode in + gcc) + # This depmode causes a compiler race in universal mode. 
+ test "$am__universal" = false || continue + ;; + nosideeffect) + # after this tag, mechanisms are not by side-effect, so they'll + # only be used when explicitly requested + if test "x$enable_dependency_tracking" = xyes; then + continue + else + break + fi + ;; + msvc7 | msvc7msys | msvisualcpp | msvcmsys) + # This compiler won't grok `-c -o', but also, the minuso test has + # not run yet. These depmodes are late enough in the game, and + # so weak that their functioning should not be impacted. + am__obj=conftest.${OBJEXT-o} + am__minus_obj= + ;; + none) break ;; + esac + if depmode=$depmode \ + source=sub/conftest.c object=$am__obj \ + depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ + $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ + >/dev/null 2>conftest.err && + grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && + grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && + grep $am__obj sub/conftest.Po > /dev/null 2>&1 && + ${MAKE-make} -s -f confmf > /dev/null 2>&1; then + # icc doesn't choke on unknown options, it will just issue warnings + # or remarks (even with -Werror). So we grep stderr for any message + # that says an option was ignored or not supported. + # When given -MP, icc 7.0 and 7.1 complain thusly: + # icc: Command line warning: ignoring option '-M'; no argument required + # The diagnosis changed in icc 8.0: + # icc: Command line remark: option '-MP' not supported + if (grep 'ignoring option' conftest.err || + grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else + am_cv_CC_dependencies_compiler_type=$depmode + break + fi + fi + done + cd .. 
+ rm -rf conftest.dir +else + am_cv_CC_dependencies_compiler_type=none +fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CC_dependencies_compiler_type" >&5 +$as_echo "$am_cv_CC_dependencies_compiler_type" >&6; } +CCDEPMODE=depmode=$am_cv_CC_dependencies_compiler_type - - - - - - - - + if + test "x$enable_dependency_tracking" != xno \ + && test "$am_cv_CC_dependencies_compiler_type" = gcc3; then + am__fastdepCC_TRUE= + am__fastdepCC_FALSE='#' +else + am__fastdepCC_TRUE='#' + am__fastdepCC_FALSE= +fi @@ -1921,511 +4254,461 @@ ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu - - - - - - -ac_aux_dir= -for ac_dir in build-aux "$srcdir"/build-aux; do - if test -f "$ac_dir/install-sh"; then - ac_aux_dir=$ac_dir - ac_install_sh="$ac_aux_dir/install-sh -c" - break - elif test -f "$ac_dir/install.sh"; then - ac_aux_dir=$ac_dir - ac_install_sh="$ac_aux_dir/install.sh -c" - break - elif test -f "$ac_dir/shtool"; then - ac_aux_dir=$ac_dir - ac_install_sh="$ac_aux_dir/shtool install -c" - break - fi -done -if test -z "$ac_aux_dir"; then - { { echo "$as_me:$LINENO: error: cannot find install-sh or install.sh in build-aux \"$srcdir\"/build-aux" >&5 -echo "$as_me: error: cannot find install-sh or install.sh in build-aux \"$srcdir\"/build-aux" >&2;} - { (exit 1); exit 1; }; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C preprocessor" >&5 +$as_echo_n "checking how to run the C preprocessor... " >&6; } +# On Suns, sometimes $CPP names a directory. +if test -n "$CPP" && test -d "$CPP"; then + CPP= fi - -# These three variables are undocumented and unsupported, -# and are intended to be withdrawn in a future Autoconf release. -# They can cause serious problems if a builder's source tree is in a directory -# whose full name contains unusual characters. 
-ac_config_guess="$SHELL $ac_aux_dir/config.guess" # Please don't use this var. -ac_config_sub="$SHELL $ac_aux_dir/config.sub" # Please don't use this var. -ac_configure="$SHELL $ac_aux_dir/configure" # Please don't use this var. - - - - -ac_config_headers="$ac_config_headers config.h" - -ac_config_headers="$ac_config_headers src/snappy/google-snappy/config.h" - - -am__api_version='1.10' - -# Find a good install program. We prefer a C program (faster), -# so one script is as good as another. But avoid the broken or -# incompatible versions: -# SysV /etc/install, /usr/sbin/install -# SunOS /usr/etc/install -# IRIX /sbin/install -# AIX /bin/install -# AmigaOS /C/install, which installs bootblocks on floppy discs -# AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag -# AFS /usr/afsws/bin/install, which mishandles nonexistent args -# SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff" -# OS/2's system install, which has a completely different semantic -# ./install, which can be erroneously created by make from ./install.sh. -{ echo "$as_me:$LINENO: checking for a BSD-compatible install" >&5 -echo $ECHO_N "checking for a BSD-compatible install... $ECHO_C" >&6; } -if test -z "$INSTALL"; then -if test "${ac_cv_path_install+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +if test -z "$CPP"; then + if ${ac_cv_prog_CPP+:} false; then : + $as_echo_n "(cached) " >&6 else - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH + # Double quotes because CPP needs to be expanded + for CPP in "$CC -E" "$CC -E -traditional-cpp" "/lib/cpp" + do + ac_preproc_ok=false +for ac_c_preproc_warn_flag in '' yes do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - # Account for people who put trailing slashes in PATH elements. 
-case $as_dir/ in - ./ | .// | /cC/* | \ - /etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \ - ?:\\/os2\\/install\\/* | ?:\\/OS2\\/INSTALL\\/* | \ - /usr/ucb/* ) ;; - *) - # OSF1 and SCO ODT 3.0 have their own names for install. - # Don't use installbsd from OSF since it installs stuff as root - # by default. - for ac_prog in ginstall scoinst install; do - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; }; then - if test $ac_prog = install && - grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then - # AIX install. It has an incompatible calling convention. - : - elif test $ac_prog = install && - grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then - # program-specific install script used by HP pwplus--don't use. - : - else - ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c" - break 3 - fi - fi - done - done - ;; -esac -done -IFS=$as_save_IFS - + # Use a header file that comes with gcc, so configuring glibc + # with a fresh cross-compiler works. + # Prefer to if __STDC__ is defined, since + # exists even on freestanding compilers. + # On the NeXT, cc -E runs the code through the compiler's parser, + # not just through cpp. "Syntax error" is here to catch this case. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#ifdef __STDC__ +# include +#else +# include +#endif + Syntax error +_ACEOF +if ac_fn_c_try_cpp "$LINENO"; then : +else + # Broken: fails on valid input. +continue fi - if test "${ac_cv_path_install+set}" = set; then - INSTALL=$ac_cv_path_install - else - # As a last resort, use the slow shell script. Don't cache a - # value for INSTALL within a source directory, because that will - # break other packages using the cache if that directory is - # removed, or if the value is a relative name. 
- INSTALL=$ac_install_sh - fi -fi -{ echo "$as_me:$LINENO: result: $INSTALL" >&5 -echo "${ECHO_T}$INSTALL" >&6; } - -# Use test -z because SunOS4 sh mishandles braces in ${var-val}. -# It thinks the first close brace ends the variable substitution. -test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}' - -test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}' +rm -f conftest.err conftest.i conftest.$ac_ext -test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644' + # OK, works on sane cases. Now check whether nonexistent headers + # can be detected and how. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +_ACEOF +if ac_fn_c_try_cpp "$LINENO"; then : + # Broken: success on invalid input. +continue +else + # Passes both tests. +ac_preproc_ok=: +break +fi +rm -f conftest.err conftest.i conftest.$ac_ext -{ echo "$as_me:$LINENO: checking whether build environment is sane" >&5 -echo $ECHO_N "checking whether build environment is sane... $ECHO_C" >&6; } -# Just in case -sleep 1 -echo timestamp > conftest.file -# Do `set' in a subshell so we don't clobber the current shell's -# arguments. Must try -L first in case configure is actually a -# symlink; some systems play weird games with the mod time of symlinks -# (eg FreeBSD returns the mod time of the symlink's containing -# directory). -if ( - set X `ls -Lt $srcdir/configure conftest.file 2> /dev/null` - if test "$*" = "X"; then - # -L didn't work. - set X `ls -t $srcdir/configure conftest.file` - fi - rm -f conftest.file - if test "$*" != "X $srcdir/configure conftest.file" \ - && test "$*" != "X conftest.file $srcdir/configure"; then +done +# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. +rm -f conftest.i conftest.err conftest.$ac_ext +if $ac_preproc_ok; then : + break +fi - # If neither matched, then we have a broken ls. This can happen - # if, for instance, CONFIG_SHELL is bash and it inherits a - # broken ls alias from the environment. 
This has actually - # happened. Such a system could not be considered "sane". - { { echo "$as_me:$LINENO: error: ls -t appears to fail. Make sure there is not a broken -alias in your environment" >&5 -echo "$as_me: error: ls -t appears to fail. Make sure there is not a broken -alias in your environment" >&2;} - { (exit 1); exit 1; }; } - fi + done + ac_cv_prog_CPP=$CPP - test "$2" = conftest.file - ) -then - # Ok. - : +fi + CPP=$ac_cv_prog_CPP else - { { echo "$as_me:$LINENO: error: newly created file is older than distributed files! -Check your system clock" >&5 -echo "$as_me: error: newly created file is older than distributed files! -Check your system clock" >&2;} - { (exit 1); exit 1; }; } + ac_cv_prog_CPP=$CPP fi -{ echo "$as_me:$LINENO: result: yes" >&5 -echo "${ECHO_T}yes" >&6; } -test "$program_prefix" != NONE && - program_transform_name="s&^&$program_prefix&;$program_transform_name" -# Use a double $ so make ignores it. -test "$program_suffix" != NONE && - program_transform_name="s&\$&$program_suffix&;$program_transform_name" -# Double any \ or $. echo might interpret backslashes. -# By default was `s,x,x', remove it if useless. -cat <<\_ACEOF >conftest.sed -s/[\\$]/&&/g;s/;s,x,x,$// +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $CPP" >&5 +$as_echo "$CPP" >&6; } +ac_preproc_ok=false +for ac_c_preproc_warn_flag in '' yes +do + # Use a header file that comes with gcc, so configuring glibc + # with a fresh cross-compiler works. + # Prefer to if __STDC__ is defined, since + # exists even on freestanding compilers. + # On the NeXT, cc -E runs the code through the compiler's parser, + # not just through cpp. "Syntax error" is here to catch this case. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ +#ifdef __STDC__ +# include +#else +# include +#endif + Syntax error _ACEOF -program_transform_name=`echo $program_transform_name | sed -f conftest.sed` -rm -f conftest.sed +if ac_fn_c_try_cpp "$LINENO"; then : -# expand $ac_aux_dir to an absolute path -am_aux_dir=`cd $ac_aux_dir && pwd` - -test x"${MISSING+set}" = xset || MISSING="\${SHELL} $am_aux_dir/missing" -# Use eval to expand $SHELL -if eval "$MISSING --run true"; then - am_missing_run="$MISSING --run " else - am_missing_run= - { echo "$as_me:$LINENO: WARNING: \`missing' script is too old or missing" >&5 -echo "$as_me: WARNING: \`missing' script is too old or missing" >&2;} + # Broken: fails on valid input. +continue fi +rm -f conftest.err conftest.i conftest.$ac_ext -{ echo "$as_me:$LINENO: checking for a thread-safe mkdir -p" >&5 -echo $ECHO_N "checking for a thread-safe mkdir -p... $ECHO_C" >&6; } -if test -z "$MKDIR_P"; then - if test "${ac_cv_path_mkdir+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + # OK, works on sane cases. Now check whether nonexistent headers + # can be detected and how. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +_ACEOF +if ac_fn_c_try_cpp "$LINENO"; then : + # Broken: success on invalid input. +continue else - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH$PATH_SEPARATOR/opt/sfw/bin -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_prog in mkdir gmkdir; do - for ac_exec_ext in '' $ac_executable_extensions; do - { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; } || continue - case `"$as_dir/$ac_prog$ac_exec_ext" --version 2>&1` in #( - 'mkdir (GNU coreutils) '* | \ - 'mkdir (coreutils) '* | \ - 'mkdir (fileutils) '4.1*) - ac_cv_path_mkdir=$as_dir/$ac_prog$ac_exec_ext - break 3;; - esac - done - done + # Passes both tests. 
+ac_preproc_ok=: +break +fi +rm -f conftest.err conftest.i conftest.$ac_ext + done -IFS=$as_save_IFS +# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. +rm -f conftest.i conftest.err conftest.$ac_ext +if $ac_preproc_ok; then : +else + { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "C preprocessor \"$CPP\" fails sanity check +See \`config.log' for more details" "$LINENO" 5; } fi - if test "${ac_cv_path_mkdir+set}" = set; then - MKDIR_P="$ac_cv_path_mkdir -p" - else - # As a last resort, use the slow shell script. Don't cache a - # value for MKDIR_P within a source directory, because that will - # break other packages using the cache if that directory is - # removed, or if the value is a relative name. - test -d ./--version && rmdir ./--version - MKDIR_P="$ac_install_sh -d" - fi -fi -{ echo "$as_me:$LINENO: result: $MKDIR_P" >&5 -echo "${ECHO_T}$MKDIR_P" >&6; } +ac_ext=c +ac_cpp='$CPP $CPPFLAGS' +ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' +ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' +ac_compiler_gnu=$ac_cv_c_compiler_gnu -mkdir_p="$MKDIR_P" -case $mkdir_p in - [\\/$]* | ?:[\\/]*) ;; - */*) mkdir_p="\$(top_builddir)/$mkdir_p" ;; -esac -for ac_prog in gawk mawk nawk awk -do - # Extract the first word of "$ac_prog", so it can be a program name with args. -set dummy $ac_prog; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_AWK+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 -else - if test -n "$AWK"; then - ac_cv_prog_AWK="$AWK" # Let the user override the test. +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for grep that handles long lines and -e" >&5 +$as_echo_n "checking for grep that handles long lines and -e... 
" >&6; } +if ${ac_cv_path_GREP+:} false; then : + $as_echo_n "(cached) " >&6 else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH + if test -z "$GREP"; then + ac_path_GREP_found=false + # Loop through the user's path and test for each of PROGNAME-LIST + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_AWK="$ac_prog" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done -done -IFS=$as_save_IFS + for ac_prog in grep ggrep; do + for ac_exec_ext in '' $ac_executable_extensions; do + ac_path_GREP="$as_dir/$ac_prog$ac_exec_ext" + as_fn_executable_p "$ac_path_GREP" || continue +# Check for GNU ac_path_GREP and select it if it is found. + # Check for GNU $ac_path_GREP +case `"$ac_path_GREP" --version 2>&1` in +*GNU*) + ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_found=:;; +*) + ac_count=0 + $as_echo_n 0123456789 >"conftest.in" + while : + do + cat "conftest.in" "conftest.in" >"conftest.tmp" + mv "conftest.tmp" "conftest.in" + cp "conftest.in" "conftest.nl" + $as_echo 'GREP' >> "conftest.nl" + "$ac_path_GREP" -e 'GREP$' -e '-(cannot match)-' < "conftest.nl" >"conftest.out" 2>/dev/null || break + diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break + as_fn_arith $ac_count + 1 && ac_count=$as_val + if test $ac_count -gt ${ac_path_GREP_max-0}; then + # Best one so far, save it but keep looking for a better one + ac_cv_path_GREP="$ac_path_GREP" + ac_path_GREP_max=$ac_count + fi + # 10*(2^10) chars as input seems more than enough + test $ac_count -gt 10 && break + done + rm -f conftest.in conftest.tmp conftest.nl conftest.out;; +esac -fi -fi -AWK=$ac_cv_prog_AWK -if test -n "$AWK"; then - { echo "$as_me:$LINENO: result: $AWK" >&5 -echo "${ECHO_T}$AWK" >&6; } + $ac_path_GREP_found 
&& break 3 + done + done + done +IFS=$as_save_IFS + if test -z "$ac_cv_path_GREP"; then + as_fn_error $? "no acceptable grep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 + fi else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + ac_cv_path_GREP=$GREP fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_GREP" >&5 +$as_echo "$ac_cv_path_GREP" >&6; } + GREP="$ac_cv_path_GREP" - test -n "$AWK" && break -done -{ echo "$as_me:$LINENO: checking whether ${MAKE-make} sets \$(MAKE)" >&5 -echo $ECHO_N "checking whether ${MAKE-make} sets \$(MAKE)... $ECHO_C" >&6; } -set x ${MAKE-make}; ac_make=`echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'` -if { as_var=ac_cv_prog_make_${ac_make}_set; eval "test \"\${$as_var+set}\" = set"; }; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for egrep" >&5 +$as_echo_n "checking for egrep... " >&6; } +if ${ac_cv_path_EGREP+:} false; then : + $as_echo_n "(cached) " >&6 else - cat >conftest.make <<\_ACEOF -SHELL = /bin/sh -all: - @echo '@@@%%%=$(MAKE)=@@@%%%' -_ACEOF -# GNU make sometimes prints "make[1]: Entering...", which would confuse us. -case `${MAKE-make} -f conftest.make 2>/dev/null` in - *@@@%%%=?*=@@@%%%*) - eval ac_cv_prog_make_${ac_make}_set=yes;; - *) - eval ac_cv_prog_make_${ac_make}_set=no;; + if echo a | $GREP -E '(a|b)' >/dev/null 2>&1 + then ac_cv_path_EGREP="$GREP -E" + else + if test -z "$EGREP"; then + ac_path_EGREP_found=false + # Loop through the user's path and test for each of PROGNAME-LIST + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_prog in egrep; do + for ac_exec_ext in '' $ac_executable_extensions; do + ac_path_EGREP="$as_dir/$ac_prog$ac_exec_ext" + as_fn_executable_p "$ac_path_EGREP" || continue +# Check for GNU ac_path_EGREP and select it if it is found. 
+ # Check for GNU $ac_path_EGREP +case `"$ac_path_EGREP" --version 2>&1` in +*GNU*) + ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_found=:;; +*) + ac_count=0 + $as_echo_n 0123456789 >"conftest.in" + while : + do + cat "conftest.in" "conftest.in" >"conftest.tmp" + mv "conftest.tmp" "conftest.in" + cp "conftest.in" "conftest.nl" + $as_echo 'EGREP' >> "conftest.nl" + "$ac_path_EGREP" 'EGREP$' < "conftest.nl" >"conftest.out" 2>/dev/null || break + diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break + as_fn_arith $ac_count + 1 && ac_count=$as_val + if test $ac_count -gt ${ac_path_EGREP_max-0}; then + # Best one so far, save it but keep looking for a better one + ac_cv_path_EGREP="$ac_path_EGREP" + ac_path_EGREP_max=$ac_count + fi + # 10*(2^10) chars as input seems more than enough + test $ac_count -gt 10 && break + done + rm -f conftest.in conftest.tmp conftest.nl conftest.out;; esac -rm -f conftest.make -fi -if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then - { echo "$as_me:$LINENO: result: yes" >&5 -echo "${ECHO_T}yes" >&6; } - SET_MAKE= -else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } - SET_MAKE="MAKE=${MAKE-make}" -fi - -rm -rf .tst 2>/dev/null -mkdir .tst 2>/dev/null -if test -d .tst; then - am__leading_dot=. -else - am__leading_dot=_ -fi -rmdir .tst 2>/dev/null -if test "`cd $srcdir && pwd`" != "`pwd`"; then - # Use -I$(srcdir) only when $(srcdir) != ., so that make's output - # is not polluted with repeated "-I." - am__isrc=' -I$(srcdir)' - # test to see if srcdir already configured - if test -f $srcdir/config.status; then - { { echo "$as_me:$LINENO: error: source directory already configured; run \"make distclean\" there first" >&5 -echo "$as_me: error: source directory already configured; run \"make distclean\" there first" >&2;} - { (exit 1); exit 1; }; } + $ac_path_EGREP_found && break 3 + done + done + done +IFS=$as_save_IFS + if test -z "$ac_cv_path_EGREP"; then + as_fn_error $? 
"no acceptable egrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 fi +else + ac_cv_path_EGREP=$EGREP fi -# test whether we have cygpath -if test -z "$CYGPATH_W"; then - if (cygpath --version) >/dev/null 2>/dev/null; then - CYGPATH_W='cygpath -w' - else - CYGPATH_W=echo - fi + fi fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_EGREP" >&5 +$as_echo "$ac_cv_path_EGREP" >&6; } + EGREP="$ac_cv_path_EGREP" -# Define the identity of the package. - PACKAGE='apache-couchdb' - VERSION='1.2.0' +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for ANSI C header files" >&5 +$as_echo_n "checking for ANSI C header files... " >&6; } +if ${ac_cv_header_stdc+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +#include +#include +#include +int +main () +{ -cat >>confdefs.h <<_ACEOF -#define PACKAGE "$PACKAGE" + ; + return 0; +} _ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_header_stdc=yes +else + ac_cv_header_stdc=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +if test $ac_cv_header_stdc = yes; then + # SunOS 4.x string.h does not declare mem*, contrary to ANSI. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include -cat >>confdefs.h <<_ACEOF -#define VERSION "$VERSION" _ACEOF +if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | + $EGREP "memchr" >/dev/null 2>&1; then : -# Some tools Automake needs. - -ACLOCAL=${ACLOCAL-"${am_missing_run}aclocal-${am__api_version}"} - - -AUTOCONF=${AUTOCONF-"${am_missing_run}autoconf"} - - -AUTOMAKE=${AUTOMAKE-"${am_missing_run}automake-${am__api_version}"} +else + ac_cv_header_stdc=no +fi +rm -f conftest* +fi -AUTOHEADER=${AUTOHEADER-"${am_missing_run}autoheader"} +if test $ac_cv_header_stdc = yes; then + # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ +#include +_ACEOF +if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | + $EGREP "free" >/dev/null 2>&1; then : -MAKEINFO=${MAKEINFO-"${am_missing_run}makeinfo"} +else + ac_cv_header_stdc=no +fi +rm -f conftest* -install_sh=${install_sh-"\$(SHELL) $am_aux_dir/install-sh"} +fi -# Installed binaries are usually stripped using `strip' when the user -# run `make install-strip'. However `strip' might not be the right -# tool to use in cross-compilation environments, therefore Automake -# will honor the `STRIP' environment variable to overrule this program. -if test "$cross_compiling" != no; then - if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. -set dummy ${ac_tool_prefix}strip; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_STRIP+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 -else - if test -n "$STRIP"; then - ac_cv_prog_STRIP="$STRIP" # Let the user override the test. +if test $ac_cv_header_stdc = yes; then + # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi. + if test "$cross_compiling" = yes; then : + : else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_STRIP="${ac_tool_prefix}strip" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done -done -IFS=$as_save_IFS + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include +#include +#if ((' ' & 0x0FF) == 0x020) +# define ISLOWER(c) ('a' <= (c) && (c) <= 'z') +# define TOUPPER(c) (ISLOWER(c) ? 
'A' + ((c) - 'a') : (c)) +#else +# define ISLOWER(c) \ + (('a' <= (c) && (c) <= 'i') \ + || ('j' <= (c) && (c) <= 'r') \ + || ('s' <= (c) && (c) <= 'z')) +# define TOUPPER(c) (ISLOWER(c) ? ((c) | 0x40) : (c)) +#endif + +#define XOR(e, f) (((e) && !(f)) || (!(e) && (f))) +int +main () +{ + int i; + for (i = 0; i < 256; i++) + if (XOR (islower (i), ISLOWER (i)) + || toupper (i) != TOUPPER (i)) + return 2; + return 0; +} +_ACEOF +if ac_fn_c_try_run "$LINENO"; then : +else + ac_cv_header_stdc=no fi +rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ + conftest.$ac_objext conftest.beam conftest.$ac_ext fi -STRIP=$ac_cv_prog_STRIP -if test -n "$STRIP"; then - { echo "$as_me:$LINENO: result: $STRIP" >&5 -echo "${ECHO_T}$STRIP" >&6; } -else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdc" >&5 +$as_echo "$ac_cv_header_stdc" >&6; } +if test $ac_cv_header_stdc = yes; then +$as_echo "#define STDC_HEADERS 1" >>confdefs.h fi -if test -z "$ac_cv_prog_STRIP"; then - ac_ct_STRIP=$STRIP - # Extract the first word of "strip", so it can be a program name with args. -set dummy strip; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_ac_ct_STRIP+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 -else - if test -n "$ac_ct_STRIP"; then - ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_STRIP="strip" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done -done -IFS=$as_save_IFS -fi -fi -ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP -if test -n "$ac_ct_STRIP"; then - { echo "$as_me:$LINENO: result: $ac_ct_STRIP" >&5 -echo "${ECHO_T}$ac_ct_STRIP" >&6; } -else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } +# On IRIX 5.3, sys/types and inttypes.h are conflicting. +for ac_header in sys/types.h sys/stat.h stdlib.h string.h memory.h strings.h \ + inttypes.h stdint.h unistd.h +do : + as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` +ac_fn_c_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default +" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + cat >>confdefs.h <<_ACEOF +#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 +_ACEOF + fi - if test "x$ac_ct_STRIP" = x; then - STRIP=":" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ echo "$as_me:$LINENO: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." >&5 -echo "$as_me: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." >&2;} -ac_tool_warned=yes ;; -esac - STRIP=$ac_ct_STRIP - fi +done + + + + ac_fn_c_check_header_mongrel "$LINENO" "minix/config.h" "ac_cv_header_minix_config_h" "$ac_includes_default" +if test "x$ac_cv_header_minix_config_h" = xyes; then : + MINIX=yes else - STRIP="$ac_cv_prog_STRIP" + MINIX= fi -fi -INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s" -# We need awk for the "check" target. The system "awk" is bad on -# some platforms. 
-# Always define AMTAR for backward compatibility. + if test "$MINIX" = yes; then + +$as_echo "#define _POSIX_SOURCE 1" >>confdefs.h -AMTAR=${AMTAR-"${am_missing_run}tar"} -am__tar='${AMTAR} chof - "$$tardir"'; am__untar='${AMTAR} xf -' +$as_echo "#define _POSIX_1_SOURCE 2" >>confdefs.h +$as_echo "#define _MINIX 1" >>confdefs.h + fi + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether it is safe to define __EXTENSIONS__" >&5 +$as_echo_n "checking whether it is safe to define __EXTENSIONS__... " >&6; } +if ${ac_cv_safe_to_define___extensions__+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +# define __EXTENSIONS__ 1 + $ac_includes_default +int +main () +{ -cat >>confdefs.h <<\_ACEOF -#define _GNU_SOURCE 1 + ; + return 0; +} _ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + ac_cv_safe_to_define___extensions__=yes +else + ac_cv_safe_to_define___extensions__=no +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_safe_to_define___extensions__" >&5 +$as_echo "$ac_cv_safe_to_define___extensions__" >&6; } + test $ac_cv_safe_to_define___extensions__ = yes && + $as_echo "#define __EXTENSIONS__ 1" >>confdefs.h + + $as_echo "#define _ALL_SOURCE 1" >>confdefs.h + + $as_echo "#define _GNU_SOURCE 1" >>confdefs.h + + $as_echo "#define _POSIX_PTHREAD_SEMANTICS 1" >>confdefs.h + + $as_echo "#define _TANDEM_SOURCE 1" >>confdefs.h # Check whether --enable-shared was given. -if test "${enable_shared+set}" = set; then +if test "${enable_shared+set}" = set; then : enableval=$enable_shared; p=${PACKAGE-default} case $enableval in yes) enable_shared=yes ;; @@ -2456,7 +4739,7 @@ # Check whether --enable-static was given. 
-if test "${enable_static+set}" = set; then +if test "${enable_static+set}" = set; then : enableval=$enable_static; p=${PACKAGE-default} case $enableval in yes) enable_static=yes ;; @@ -2495,10 +4778,10 @@ if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args. set dummy ${ac_tool_prefix}gcc; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_CC+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_CC+:} false; then : + $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. @@ -2508,25 +4791,25 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="${ac_tool_prefix}gcc" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then - { echo "$as_me:$LINENO: result: $CC" >&5 -echo "${ECHO_T}$CC" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi @@ -2535,10 +4818,10 @@ ac_ct_CC=$CC # Extract the first word of "gcc", so it can be a program name with args. set dummy gcc; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... 
$ECHO_C" >&6; } -if test "${ac_cv_prog_ac_ct_CC+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_CC+:} false; then : + $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_CC"; then ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. @@ -2548,25 +4831,25 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CC="gcc" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS fi fi ac_ct_CC=$ac_cv_prog_ac_ct_CC if test -n "$ac_ct_CC"; then - { echo "$as_me:$LINENO: result: $ac_ct_CC" >&5 -echo "${ECHO_T}$ac_ct_CC" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 +$as_echo "$ac_ct_CC" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi if test "x$ac_ct_CC" = x; then @@ -2574,12 +4857,8 @@ else case $cross_compiling:$ac_tool_warned in yes:) -{ echo "$as_me:$LINENO: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." >&5 -echo "$as_me: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." 
>&2;} +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac CC=$ac_ct_CC @@ -2592,10 +4871,10 @@ if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args. set dummy ${ac_tool_prefix}cc; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_CC+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_CC+:} false; then : + $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. @@ -2605,25 +4884,25 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="${ac_tool_prefix}cc" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then - { echo "$as_me:$LINENO: result: $CC" >&5 -echo "${ECHO_T}$CC" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi @@ -2632,10 +4911,10 @@ if test -z "$CC"; then # Extract the first word of "cc", so it can be a program name with args. 
set dummy cc; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_CC+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_CC+:} false; then : + $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. @@ -2646,18 +4925,18 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then ac_prog_rejected=yes continue fi ac_cv_prog_CC="cc" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS if test $ac_prog_rejected = yes; then @@ -2676,11 +4955,11 @@ fi CC=$ac_cv_prog_CC if test -n "$CC"; then - { echo "$as_me:$LINENO: result: $CC" >&5 -echo "${ECHO_T}$CC" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi @@ -2691,10 +4970,10 @@ do # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. set dummy $ac_tool_prefix$ac_prog; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... 
$ECHO_C" >&6; } -if test "${ac_cv_prog_CC+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_CC+:} false; then : + $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. @@ -2704,25 +4983,25 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="$ac_tool_prefix$ac_prog" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then - { echo "$as_me:$LINENO: result: $CC" >&5 -echo "${ECHO_T}$CC" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 +$as_echo "$CC" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi @@ -2735,10 +5014,10 @@ do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_ac_ct_CC+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_CC+:} false; then : + $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_CC"; then ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. @@ -2748,25 +5027,25 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CC="$ac_prog" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS fi fi ac_ct_CC=$ac_cv_prog_ac_ct_CC if test -n "$ac_ct_CC"; then - { echo "$as_me:$LINENO: result: $ac_ct_CC" >&5 -echo "${ECHO_T}$ac_ct_CC" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 +$as_echo "$ac_ct_CC" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi @@ -2778,12 +5057,8 @@ else case $cross_compiling:$ac_tool_warned in yes:) -{ echo "$as_me:$LINENO: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." >&5 -echo "$as_me: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." >&2;} +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac CC=$ac_ct_CC @@ -2793,299 +5068,42 @@ fi -test -z "$CC" && { { echo "$as_me:$LINENO: error: no acceptable C compiler found in \$PATH -See \`config.log' for more details." >&5 -echo "$as_me: error: no acceptable C compiler found in \$PATH -See \`config.log' for more details." >&2;} - { (exit 1); exit 1; }; } - -# Provide some information about the compiler. 
-echo "$as_me:$LINENO: checking for C compiler version" >&5 -ac_compiler=`set X $ac_compile; echo $2` -{ (ac_try="$ac_compiler --version >&5" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compiler --version >&5") 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } -{ (ac_try="$ac_compiler -v >&5" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compiler -v >&5") 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } -{ (ac_try="$ac_compiler -V >&5" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compiler -V >&5") 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } - -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -ac_clean_files_save=$ac_clean_files -ac_clean_files="$ac_clean_files a.out a.exe b.out" -# Try to create an executable without -o first, disregard a.out. -# It will help us diagnose broken compilers, and finding out an intuition -# of exeext. -{ echo "$as_me:$LINENO: checking for C compiler default output file name" >&5 -echo $ECHO_N "checking for C compiler default output file name... $ECHO_C" >&6; } -ac_link_default=`echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'` -# -# List of possible output files, starting from the most likely. -# The algorithm is not robust to junk in `.', hence go to wildcards (a.*) -# only as a last resort. b.out is created by i960 compilers. 
-ac_files='a_out.exe a.exe conftest.exe a.out conftest a.* conftest.* b.out' -# -# The IRIX 6 linker writes into existing files which may not be -# executable, retaining their permissions. Remove them first so a -# subsequent execution test works. -ac_rmfiles= -for ac_file in $ac_files -do - case $ac_file in - *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.o | *.obj ) ;; - * ) ac_rmfiles="$ac_rmfiles $ac_file";; - esac -done -rm -f $ac_rmfiles - -if { (ac_try="$ac_link_default" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link_default") 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); }; then - # Autoconf-2.13 could set the ac_cv_exeext variable to `no'. -# So ignore a value of `no', otherwise this would lead to `EXEEXT = no' -# in a Makefile. We should not override ac_cv_exeext if it was cached, -# so that the user can short-circuit this test for compilers unknown to -# Autoconf. -for ac_file in $ac_files '' -do - test -f "$ac_file" || continue - case $ac_file in - *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.o | *.obj ) - ;; - [ab].out ) - # We found the default executable, but exeext='' is most - # certainly right. - break;; - *.* ) - if test "${ac_cv_exeext+set}" = set && test "$ac_cv_exeext" != no; - then :; else - ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` - fi - # We set ac_cv_exeext here because the later test for it is not - # safe: cross compilers may not add the suffix if given an `-o' - # argument, so we may need to know it at that point already. - # Even if this section looks crufty: it has the advantage of - # actually working. 
- break;; - * ) - break;; - esac -done -test "$ac_cv_exeext" = no && ac_cv_exeext= - -else - ac_file='' -fi - -{ echo "$as_me:$LINENO: result: $ac_file" >&5 -echo "${ECHO_T}$ac_file" >&6; } -if test -z "$ac_file"; then - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - -{ { echo "$as_me:$LINENO: error: C compiler cannot create executables -See \`config.log' for more details." >&5 -echo "$as_me: error: C compiler cannot create executables -See \`config.log' for more details." >&2;} - { (exit 77); exit 77; }; } -fi - -ac_exeext=$ac_cv_exeext - -# Check that the compiler produces executables we can run. If not, either -# the compiler is broken, or we cross compile. -{ echo "$as_me:$LINENO: checking whether the C compiler works" >&5 -echo $ECHO_N "checking whether the C compiler works... $ECHO_C" >&6; } -# FIXME: These cross compiler hacks should be removed for Autoconf 3.0 -# If not cross compiling, check that we can run a simple program. -if test "$cross_compiling" != yes; then - if { ac_try='./$ac_file' - { (case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); }; }; then - cross_compiling=no - else - if test "$cross_compiling" = maybe; then - cross_compiling=yes - else - { { echo "$as_me:$LINENO: error: cannot run C compiled programs. -If you meant to cross compile, use \`--host'. -See \`config.log' for more details." >&5 -echo "$as_me: error: cannot run C compiled programs. -If you meant to cross compile, use \`--host'. -See \`config.log' for more details." >&2;} - { (exit 1); exit 1; }; } - fi - fi -fi -{ echo "$as_me:$LINENO: result: yes" >&5 -echo "${ECHO_T}yes" >&6; } - -rm -f a.out a.exe conftest$ac_cv_exeext b.out -ac_clean_files=$ac_clean_files_save -# Check that the compiler produces executables we can run. 
If not, either -# the compiler is broken, or we cross compile. -{ echo "$as_me:$LINENO: checking whether we are cross compiling" >&5 -echo $ECHO_N "checking whether we are cross compiling... $ECHO_C" >&6; } -{ echo "$as_me:$LINENO: result: $cross_compiling" >&5 -echo "${ECHO_T}$cross_compiling" >&6; } - -{ echo "$as_me:$LINENO: checking for suffix of executables" >&5 -echo $ECHO_N "checking for suffix of executables... $ECHO_C" >&6; } -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); }; then - # If both `conftest.exe' and `conftest' are `present' (well, observable) -# catch `conftest.exe'. For instance with Cygwin, `ls conftest' will -# work properly (i.e., refer to `conftest.exe'), while it won't with -# `rm'. -for ac_file in conftest.exe conftest conftest.*; do - test -f "$ac_file" || continue - case $ac_file in - *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.o | *.obj ) ;; - *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` - break;; - * ) break;; - esac -done -else - { { echo "$as_me:$LINENO: error: cannot compute suffix of executables: cannot compile and link -See \`config.log' for more details." >&5 -echo "$as_me: error: cannot compute suffix of executables: cannot compile and link -See \`config.log' for more details." >&2;} - { (exit 1); exit 1; }; } -fi - -rm -f conftest$ac_cv_exeext -{ echo "$as_me:$LINENO: result: $ac_cv_exeext" >&5 -echo "${ECHO_T}$ac_cv_exeext" >&6; } - -rm -f conftest.$ac_ext -EXEEXT=$ac_cv_exeext -ac_exeext=$EXEEXT -{ echo "$as_me:$LINENO: checking for suffix of object files" >&5 -echo $ECHO_N "checking for suffix of object files... 
$ECHO_C" >&6; } -if test "${ac_cv_objext+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 -else - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -rm -f conftest.o conftest.obj -if { (ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compile") 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); }; then - for ac_file in conftest.o conftest.obj conftest.*; do - test -f "$ac_file" || continue; - case $ac_file in - *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf ) ;; - *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'` - break;; - esac -done -else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - -{ { echo "$as_me:$LINENO: error: cannot compute suffix of object files: cannot compile -See \`config.log' for more details." >&5 -echo "$as_me: error: cannot compute suffix of object files: cannot compile -See \`config.log' for more details." >&2;} - { (exit 1); exit 1; }; } -fi +test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "no acceptable C compiler found in \$PATH +See \`config.log' for more details" "$LINENO" 5; } -rm -f conftest.$ac_cv_objext conftest.$ac_ext -fi -{ echo "$as_me:$LINENO: result: $ac_cv_objext" >&5 -echo "${ECHO_T}$ac_cv_objext" >&6; } -OBJEXT=$ac_cv_objext -ac_objext=$OBJEXT -{ echo "$as_me:$LINENO: checking whether we are using the GNU C compiler" >&5 -echo $ECHO_N "checking whether we are using the GNU C compiler... $ECHO_C" >&6; } -if test "${ac_cv_c_compiler_gnu+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +# Provide some information about the compiler. 
+$as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5 +set X $ac_compile +ac_compiler=$2 +for ac_option in --version -v -V -qversion; do + { { ac_try="$ac_compiler $ac_option >&5" +case "(($ac_try" in + *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; + *) ac_try_echo=$ac_try;; +esac +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_compiler $ac_option >&5") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + sed '10a\ +... rest of stderr output deleted ... + 10q' conftest.err >conftest.er1 + cat conftest.er1 >&5 + fi + rm -f conftest.er1 conftest.err + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } +done + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5 +$as_echo_n "checking whether we are using the GNU C compiler... " >&6; } +if ${ac_cv_c_compiler_gnu+:} false; then : + $as_echo_n "(cached) " >&6 else - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF + cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int @@ -3099,54 +5117,34 @@ return 0; } _ACEOF -rm -f conftest.$ac_objext -if { (ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compile") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_c_werror_flag" || - test ! 
-s conftest.err - } && test -s conftest.$ac_objext; then +if ac_fn_c_try_compile "$LINENO"; then : ac_compiler_gnu=yes else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_compiler_gnu=no + ac_compiler_gnu=no fi - rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cv_c_compiler_gnu=$ac_compiler_gnu fi -{ echo "$as_me:$LINENO: result: $ac_cv_c_compiler_gnu" >&5 -echo "${ECHO_T}$ac_cv_c_compiler_gnu" >&6; } -GCC=`test $ac_compiler_gnu = yes && echo yes` +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5 +$as_echo "$ac_cv_c_compiler_gnu" >&6; } +if test $ac_compiler_gnu = yes; then + GCC=yes +else + GCC= +fi ac_test_CFLAGS=${CFLAGS+set} ac_save_CFLAGS=$CFLAGS -{ echo "$as_me:$LINENO: checking whether $CC accepts -g" >&5 -echo $ECHO_N "checking whether $CC accepts -g... $ECHO_C" >&6; } -if test "${ac_cv_prog_cc_g+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5 +$as_echo_n "checking whether $CC accepts -g... " >&6; } +if ${ac_cv_prog_cc_g+:} false; then : + $as_echo_n "(cached) " >&6 else ac_save_c_werror_flag=$ac_c_werror_flag ac_c_werror_flag=yes ac_cv_prog_cc_g=no CFLAGS="-g" - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF + cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int @@ -3157,34 +5155,11 @@ return 0; } _ACEOF -rm -f conftest.$ac_objext -if { (ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compile") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_c_werror_flag" || - test ! 
-s conftest.err - } && test -s conftest.$ac_objext; then +if ac_fn_c_try_compile "$LINENO"; then : ac_cv_prog_cc_g=yes else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - CFLAGS="" - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF + CFLAGS="" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int @@ -3195,35 +5170,12 @@ return 0; } _ACEOF -rm -f conftest.$ac_objext -if { (ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compile") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_c_werror_flag" || - test ! -s conftest.err - } && test -s conftest.$ac_objext; then - : -else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 +if ac_fn_c_try_compile "$LINENO"; then : - ac_c_werror_flag=$ac_save_c_werror_flag +else + ac_c_werror_flag=$ac_save_c_werror_flag CFLAGS="-g" - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF + cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int @@ -3234,42 +5186,18 @@ return 0; } _ACEOF -rm -f conftest.$ac_objext -if { (ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compile") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_c_werror_flag" || - test ! 
-s conftest.err - } && test -s conftest.$ac_objext; then +if ac_fn_c_try_compile "$LINENO"; then : ac_cv_prog_cc_g=yes -else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - fi - rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi - rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi - rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_c_werror_flag=$ac_save_c_werror_flag fi -{ echo "$as_me:$LINENO: result: $ac_cv_prog_cc_g" >&5 -echo "${ECHO_T}$ac_cv_prog_cc_g" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5 +$as_echo "$ac_cv_prog_cc_g" >&6; } if test "$ac_test_CFLAGS" = set; then CFLAGS=$ac_save_CFLAGS elif test $ac_cv_prog_cc_g = yes; then @@ -3285,23 +5213,18 @@ CFLAGS= fi fi -{ echo "$as_me:$LINENO: checking for $CC option to accept ISO C89" >&5 -echo $ECHO_N "checking for $CC option to accept ISO C89... $ECHO_C" >&6; } -if test "${ac_cv_prog_cc_c89+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5 +$as_echo_n "checking for $CC option to accept ISO C89... " >&6; } +if ${ac_cv_prog_cc_c89+:} false; then : + $as_echo_n "(cached) " >&6 else ac_cv_prog_cc_c89=no ac_save_CC=$CC -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF +cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include -#include -#include +struct stat; /* Most of the following tests are stolen from RCS 5.7's src/conf.sh. 
*/ struct buf { int x; }; FILE * (*rcsopen) (struct buf *, struct stat *, int); @@ -3353,31 +5276,9 @@ -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__" do CC="$ac_save_CC $ac_arg" - rm -f conftest.$ac_objext -if { (ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compile") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_c_werror_flag" || - test ! -s conftest.err - } && test -s conftest.$ac_objext; then + if ac_fn_c_try_compile "$LINENO"; then : ac_cv_prog_cc_c89=$ac_arg -else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - fi - rm -f core conftest.err conftest.$ac_objext test "x$ac_cv_prog_cc_c89" != "xno" && break done @@ -3388,92 +5289,32 @@ # AC_CACHE_VAL case "x$ac_cv_prog_cc_c89" in x) - { echo "$as_me:$LINENO: result: none needed" >&5 -echo "${ECHO_T}none needed" >&6; } ;; + { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 +$as_echo "none needed" >&6; } ;; xno) - { echo "$as_me:$LINENO: result: unsupported" >&5 -echo "${ECHO_T}unsupported" >&6; } ;; + { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 +$as_echo "unsupported" >&6; } ;; *) CC="$CC $ac_cv_prog_cc_c89" - { echo "$as_me:$LINENO: result: $ac_cv_prog_cc_c89" >&5 -echo "${ECHO_T}$ac_cv_prog_cc_c89" >&6; } ;; + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5 +$as_echo "$ac_cv_prog_cc_c89" >&6; } ;; esac +if test "x$ac_cv_prog_cc_c89" != xno; then : +fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu -DEPDIR="${am__leading_dot}deps" - 
-ac_config_commands="$ac_config_commands depfiles" - - -am_make=${MAKE-make} -cat > confinc << 'END' -am__doit: - @echo done -.PHONY: am__doit -END -# If we don't find an include directive, just comment out the code. -{ echo "$as_me:$LINENO: checking for style of include used by $am_make" >&5 -echo $ECHO_N "checking for style of include used by $am_make... $ECHO_C" >&6; } -am__include="#" -am__quote= -_am_result=none -# First try GNU make style include. -echo "include confinc" > confmf -# We grep out `Entering directory' and `Leaving directory' -# messages which can occur if `w' ends up in MAKEFLAGS. -# In particular we don't look at `^make:' because GNU make might -# be invoked under some other name (usually "gmake"), in which -# case it prints its new name instead of `make'. -if test "`$am_make -s -f confmf 2> /dev/null | grep -v 'ing directory'`" = "done"; then - am__include=include - am__quote= - _am_result=GNU -fi -# Now try BSD make style include. -if test "$am__include" = "#"; then - echo '.include "confinc"' > confmf - if test "`$am_make -s -f confmf 2> /dev/null`" = "done"; then - am__include=.include - am__quote="\"" - _am_result=BSD - fi -fi - - -{ echo "$as_me:$LINENO: result: $_am_result" >&5 -echo "${ECHO_T}$_am_result" >&6; } -rm -f confinc confmf - -# Check whether --enable-dependency-tracking was given. -if test "${enable_dependency_tracking+set}" = set; then - enableval=$enable_dependency_tracking; -fi - -if test "x$enable_dependency_tracking" != xno; then - am_depcomp="$ac_aux_dir/depcomp" - AMDEPBACKSLASH='\' -fi - if test "x$enable_dependency_tracking" != xno; then - AMDEP_TRUE= - AMDEP_FALSE='#' -else - AMDEP_TRUE='#' - AMDEP_FALSE= -fi - - depcc="$CC" am_compiler_list= -{ echo "$as_me:$LINENO: checking dependency style of $depcc" >&5 -echo $ECHO_N "checking dependency style of $depcc... 
$ECHO_C" >&6; } -if test "${am_cv_CC_dependencies_compiler_type+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5 +$as_echo_n "checking dependency style of $depcc... " >&6; } +if ${am_cv_CC_dependencies_compiler_type+:} false; then : + $as_echo_n "(cached) " >&6 else if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then # We make a subdir and do the tests there. Otherwise we can end up @@ -3481,6 +5322,7 @@ # instance it was reported that on HP-UX the gcc test will end up # making a dummy file named `D' -- because `-MD' means `put the output # in D'. + rm -rf conftest.dir mkdir conftest.dir # Copy depcomp to subdir because otherwise we won't find it if we're # using a relative directory. @@ -3498,6 +5340,11 @@ if test "$am_compiler_list" = ""; then am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp` fi + am__universal=false + case " $depcc " in #( + *\ -arch\ *\ -arch\ *) am__universal=true ;; + esac + for depmode in $am_compiler_list; do # Setup a source with many dependencies, because some compilers # like to wrap large dependency lists on column 80 (with \), and @@ -3515,7 +5362,17 @@ done echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf + # We check with `-c' and `-o' for the sake of the "dashmstdout" + # mode. It turns out that the SunPro C++ compiler does not properly + # handle `-M -o', and we need to detect this. Also, some Intel + # versions had trouble with output in subdirs + am__obj=sub/conftest.${OBJEXT-o} + am__minus_obj="-o $am__obj" case $depmode in + gcc) + # This depmode causes a compiler race in universal mode. 
+ test "$am__universal" = false || continue + ;; nosideeffect) # after this tag, mechanisms are not by side-effect, so they'll # only be used when explicitly requested @@ -3525,19 +5382,23 @@ break fi ;; + msvc7 | msvc7msys | msvisualcpp | msvcmsys) + # This compiler won't grok `-c -o', but also, the minuso test has + # not run yet. These depmodes are late enough in the game, and + # so weak that their functioning should not be impacted. + am__obj=conftest.${OBJEXT-o} + am__minus_obj= + ;; none) break ;; esac - # We check with `-c' and `-o' for the sake of the "dashmstdout" - # mode. It turns out that the SunPro C++ compiler does not properly - # handle `-M -o', and we need to detect this. if depmode=$depmode \ - source=sub/conftest.c object=sub/conftest.${OBJEXT-o} \ + source=sub/conftest.c object=$am__obj \ depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ - $SHELL ./depcomp $depcc -c -o sub/conftest.${OBJEXT-o} sub/conftest.c \ + $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ >/dev/null 2>conftest.err && grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && - grep sub/conftest.${OBJEXT-o} sub/conftest.Po > /dev/null 2>&1 && + grep $am__obj sub/conftest.Po > /dev/null 2>&1 && ${MAKE-make} -s -f confmf > /dev/null 2>&1; then # icc doesn't choke on unknown options, it will just issue warnings # or remarks (even with -Werror). 
So we grep stderr for any message @@ -3561,8 +5422,8 @@ fi fi -{ echo "$as_me:$LINENO: result: $am_cv_CC_dependencies_compiler_type" >&5 -echo "${ECHO_T}$am_cv_CC_dependencies_compiler_type" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CC_dependencies_compiler_type" >&5 +$as_echo "$am_cv_CC_dependencies_compiler_type" >&6; } CCDEPMODE=depmode=$am_cv_CC_dependencies_compiler_type if @@ -3578,14 +5439,14 @@ case `pwd` in *\ * | *\ *) - { echo "$as_me:$LINENO: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&5 -echo "$as_me: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&2;} ;; + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&5 +$as_echo "$as_me: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&2;} ;; esac -macro_version='2.2.10' -macro_revision='1.3175' +macro_version='2.4' +macro_revision='1.3293' @@ -3603,35 +5464,27 @@ # Make sure we can run config.sub. $SHELL "$ac_aux_dir/config.sub" sun4 >/dev/null 2>&1 || - { { echo "$as_me:$LINENO: error: cannot run $SHELL $ac_aux_dir/config.sub" >&5 -echo "$as_me: error: cannot run $SHELL $ac_aux_dir/config.sub" >&2;} - { (exit 1); exit 1; }; } - -{ echo "$as_me:$LINENO: checking build system type" >&5 -echo $ECHO_N "checking build system type... $ECHO_C" >&6; } -if test "${ac_cv_build+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + as_fn_error $? "cannot run $SHELL $ac_aux_dir/config.sub" "$LINENO" 5 + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking build system type" >&5 +$as_echo_n "checking build system type... 
" >&6; } +if ${ac_cv_build+:} false; then : + $as_echo_n "(cached) " >&6 else ac_build_alias=$build_alias test "x$ac_build_alias" = x && ac_build_alias=`$SHELL "$ac_aux_dir/config.guess"` test "x$ac_build_alias" = x && - { { echo "$as_me:$LINENO: error: cannot guess build type; you must specify one" >&5 -echo "$as_me: error: cannot guess build type; you must specify one" >&2;} - { (exit 1); exit 1; }; } + as_fn_error $? "cannot guess build type; you must specify one" "$LINENO" 5 ac_cv_build=`$SHELL "$ac_aux_dir/config.sub" $ac_build_alias` || - { { echo "$as_me:$LINENO: error: $SHELL $ac_aux_dir/config.sub $ac_build_alias failed" >&5 -echo "$as_me: error: $SHELL $ac_aux_dir/config.sub $ac_build_alias failed" >&2;} - { (exit 1); exit 1; }; } + as_fn_error $? "$SHELL $ac_aux_dir/config.sub $ac_build_alias failed" "$LINENO" 5 fi -{ echo "$as_me:$LINENO: result: $ac_cv_build" >&5 -echo "${ECHO_T}$ac_cv_build" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_build" >&5 +$as_echo "$ac_cv_build" >&6; } case $ac_cv_build in *-*-*) ;; -*) { { echo "$as_me:$LINENO: error: invalid value of canonical build" >&5 -echo "$as_me: error: invalid value of canonical build" >&2;} - { (exit 1); exit 1; }; };; +*) as_fn_error $? "invalid value of canonical build" "$LINENO" 5;; esac build=$ac_cv_build ac_save_IFS=$IFS; IFS='-' @@ -3647,28 +5500,24 @@ case $build_os in *\ *) build_os=`echo "$build_os" | sed 's/ /-/g'`;; esac -{ echo "$as_me:$LINENO: checking host system type" >&5 -echo $ECHO_N "checking host system type... $ECHO_C" >&6; } -if test "${ac_cv_host+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking host system type" >&5 +$as_echo_n "checking host system type... 
" >&6; } +if ${ac_cv_host+:} false; then : + $as_echo_n "(cached) " >&6 else if test "x$host_alias" = x; then ac_cv_host=$ac_cv_build else ac_cv_host=`$SHELL "$ac_aux_dir/config.sub" $host_alias` || - { { echo "$as_me:$LINENO: error: $SHELL $ac_aux_dir/config.sub $host_alias failed" >&5 -echo "$as_me: error: $SHELL $ac_aux_dir/config.sub $host_alias failed" >&2;} - { (exit 1); exit 1; }; } + as_fn_error $? "$SHELL $ac_aux_dir/config.sub $host_alias failed" "$LINENO" 5 fi fi -{ echo "$as_me:$LINENO: result: $ac_cv_host" >&5 -echo "${ECHO_T}$ac_cv_host" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_host" >&5 +$as_echo "$ac_cv_host" >&6; } case $ac_cv_host in *-*-*) ;; -*) { { echo "$as_me:$LINENO: error: invalid value of canonical host" >&5 -echo "$as_me: error: invalid value of canonical host" >&2;} - { (exit 1); exit 1; }; };; +*) as_fn_error $? "invalid value of canonical host" "$LINENO" 5;; esac host=$ac_cv_host ac_save_IFS=$IFS; IFS='-' @@ -3705,10 +5554,10 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO -{ echo "$as_me:$LINENO: checking how to print strings" >&5 -echo $ECHO_N "checking how to print strings... $ECHO_C" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5 +$as_echo_n "checking how to print strings... " >&6; } # Test print first, because it will be a builtin if present. 
-if test "X`print -r -- -n 2>/dev/null`" = X-n && \ +if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \ test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then ECHO='print -r --' elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then @@ -3732,12 +5581,12 @@ } case "$ECHO" in - printf*) { echo "$as_me:$LINENO: result: printf" >&5 -echo "${ECHO_T}printf" >&6; } ;; - print*) { echo "$as_me:$LINENO: result: print -r" >&5 -echo "${ECHO_T}print -r" >&6; } ;; - *) { echo "$as_me:$LINENO: result: cat" >&5 -echo "${ECHO_T}cat" >&6; } ;; + printf*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: printf" >&5 +$as_echo "printf" >&6; } ;; + print*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: print -r" >&5 +$as_echo "print -r" >&6; } ;; + *) { $as_echo "$as_me:${as_lineno-$LINENO}: result: cat" >&5 +$as_echo "cat" >&6; } ;; esac @@ -3753,51 +5602,46 @@ -{ echo "$as_me:$LINENO: checking for a sed that does not truncate output" >&5 -echo $ECHO_N "checking for a sed that does not truncate output... $ECHO_C" >&6; } -if test "${ac_cv_path_SED+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a sed that does not truncate output" >&5 +$as_echo_n "checking for a sed that does not truncate output... 
" >&6; } +if ${ac_cv_path_SED+:} false; then : + $as_echo_n "(cached) " >&6 else ac_script=s/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb/ for ac_i in 1 2 3 4 5 6 7; do ac_script="$ac_script$as_nl$ac_script" done - echo "$ac_script" | sed 99q >conftest.sed - $as_unset ac_script || ac_script= - # Extract the first word of "sed gsed" to use in msg output -if test -z "$SED"; then -set dummy sed gsed; ac_prog_name=$2 -if test "${ac_cv_path_SED+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 -else + echo "$ac_script" 2>/dev/null | sed 99q >conftest.sed + { ac_script=; unset ac_script;} + if test -z "$SED"; then ac_path_SED_found=false -# Loop through the user's path and test for each of PROGNAME-LIST -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR + # Loop through the user's path and test for each of PROGNAME-LIST + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. - for ac_prog in sed gsed; do - for ac_exec_ext in '' $ac_executable_extensions; do - ac_path_SED="$as_dir/$ac_prog$ac_exec_ext" - { test -f "$ac_path_SED" && $as_test_x "$ac_path_SED"; } || continue - # Check for GNU ac_path_SED and select it if it is found. + for ac_prog in sed gsed; do + for ac_exec_ext in '' $ac_executable_extensions; do + ac_path_SED="$as_dir/$ac_prog$ac_exec_ext" + as_fn_executable_p "$ac_path_SED" || continue +# Check for GNU ac_path_SED and select it if it is found. 
# Check for GNU $ac_path_SED case `"$ac_path_SED" --version 2>&1` in *GNU*) ac_cv_path_SED="$ac_path_SED" ac_path_SED_found=:;; *) ac_count=0 - echo $ECHO_N "0123456789$ECHO_C" >"conftest.in" + $as_echo_n 0123456789 >"conftest.in" while : do cat "conftest.in" "conftest.in" >"conftest.tmp" mv "conftest.tmp" "conftest.in" cp "conftest.in" "conftest.nl" - echo '' >> "conftest.nl" + $as_echo '' >> "conftest.nl" "$ac_path_SED" -f conftest.sed < "conftest.nl" >"conftest.out" 2>/dev/null || break diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break - ac_count=`expr $ac_count + 1` + as_fn_arith $ac_count + 1 && ac_count=$as_val if test $ac_count -gt ${ac_path_SED_max-0}; then # Best one so far, save it but keep looking for a better one ac_cv_path_SED="$ac_path_SED" @@ -3809,31 +5653,21 @@ rm -f conftest.in conftest.tmp conftest.nl conftest.out;; esac - - $ac_path_SED_found && break 3 + $ac_path_SED_found && break 3 + done + done done -done - -done IFS=$as_save_IFS - - -fi - -SED="$ac_cv_path_SED" -if test -z "$SED"; then - { { echo "$as_me:$LINENO: error: no acceptable $ac_prog_name could be found in \$PATH" >&5 -echo "$as_me: error: no acceptable $ac_prog_name could be found in \$PATH" >&2;} - { (exit 1); exit 1; }; } -fi - + if test -z "$ac_cv_path_SED"; then + as_fn_error $? "no acceptable sed could be found in \$PATH" "$LINENO" 5 + fi else ac_cv_path_SED=$SED fi fi -{ echo "$as_me:$LINENO: result: $ac_cv_path_SED" >&5 -echo "${ECHO_T}$ac_cv_path_SED" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_SED" >&5 +$as_echo "$ac_cv_path_SED" >&6; } SED="$ac_cv_path_SED" rm -f conftest.sed @@ -3849,211 +5683,44 @@ - -{ echo "$as_me:$LINENO: checking for grep that handles long lines and -e" >&5 -echo $ECHO_N "checking for grep that handles long lines and -e... 
$ECHO_C" >&6; } -if test "${ac_cv_path_GREP+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 -else - # Extract the first word of "grep ggrep" to use in msg output -if test -z "$GREP"; then -set dummy grep ggrep; ac_prog_name=$2 -if test "${ac_cv_path_GREP+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 -else - ac_path_GREP_found=false -# Loop through the user's path and test for each of PROGNAME-LIST -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_prog in grep ggrep; do - for ac_exec_ext in '' $ac_executable_extensions; do - ac_path_GREP="$as_dir/$ac_prog$ac_exec_ext" - { test -f "$ac_path_GREP" && $as_test_x "$ac_path_GREP"; } || continue - # Check for GNU ac_path_GREP and select it if it is found. - # Check for GNU $ac_path_GREP -case `"$ac_path_GREP" --version 2>&1` in -*GNU*) - ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_found=:;; -*) - ac_count=0 - echo $ECHO_N "0123456789$ECHO_C" >"conftest.in" - while : - do - cat "conftest.in" "conftest.in" >"conftest.tmp" - mv "conftest.tmp" "conftest.in" - cp "conftest.in" "conftest.nl" - echo 'GREP' >> "conftest.nl" - "$ac_path_GREP" -e 'GREP$' -e '-(cannot match)-' < "conftest.nl" >"conftest.out" 2>/dev/null || break - diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break - ac_count=`expr $ac_count + 1` - if test $ac_count -gt ${ac_path_GREP_max-0}; then - # Best one so far, save it but keep looking for a better one - ac_cv_path_GREP="$ac_path_GREP" - ac_path_GREP_max=$ac_count - fi - # 10*(2^10) chars as input seems more than enough - test $ac_count -gt 10 && break - done - rm -f conftest.in conftest.tmp conftest.nl conftest.out;; -esac - - - $ac_path_GREP_found && break 3 - done -done - -done -IFS=$as_save_IFS - - -fi - -GREP="$ac_cv_path_GREP" -if test -z "$GREP"; then - { { echo "$as_me:$LINENO: error: no acceptable $ac_prog_name could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" >&5 -echo 
"$as_me: error: no acceptable $ac_prog_name could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" >&2;} - { (exit 1); exit 1; }; } -fi - -else - ac_cv_path_GREP=$GREP -fi - - -fi -{ echo "$as_me:$LINENO: result: $ac_cv_path_GREP" >&5 -echo "${ECHO_T}$ac_cv_path_GREP" >&6; } - GREP="$ac_cv_path_GREP" - - -{ echo "$as_me:$LINENO: checking for egrep" >&5 -echo $ECHO_N "checking for egrep... $ECHO_C" >&6; } -if test "${ac_cv_path_EGREP+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 -else - if echo a | $GREP -E '(a|b)' >/dev/null 2>&1 - then ac_cv_path_EGREP="$GREP -E" - else - # Extract the first word of "egrep" to use in msg output -if test -z "$EGREP"; then -set dummy egrep; ac_prog_name=$2 -if test "${ac_cv_path_EGREP+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 -else - ac_path_EGREP_found=false -# Loop through the user's path and test for each of PROGNAME-LIST -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_prog in egrep; do - for ac_exec_ext in '' $ac_executable_extensions; do - ac_path_EGREP="$as_dir/$ac_prog$ac_exec_ext" - { test -f "$ac_path_EGREP" && $as_test_x "$ac_path_EGREP"; } || continue - # Check for GNU ac_path_EGREP and select it if it is found. 
- # Check for GNU $ac_path_EGREP -case `"$ac_path_EGREP" --version 2>&1` in -*GNU*) - ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_found=:;; -*) - ac_count=0 - echo $ECHO_N "0123456789$ECHO_C" >"conftest.in" - while : - do - cat "conftest.in" "conftest.in" >"conftest.tmp" - mv "conftest.tmp" "conftest.in" - cp "conftest.in" "conftest.nl" - echo 'EGREP' >> "conftest.nl" - "$ac_path_EGREP" 'EGREP$' < "conftest.nl" >"conftest.out" 2>/dev/null || break - diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break - ac_count=`expr $ac_count + 1` - if test $ac_count -gt ${ac_path_EGREP_max-0}; then - # Best one so far, save it but keep looking for a better one - ac_cv_path_EGREP="$ac_path_EGREP" - ac_path_EGREP_max=$ac_count - fi - # 10*(2^10) chars as input seems more than enough - test $ac_count -gt 10 && break - done - rm -f conftest.in conftest.tmp conftest.nl conftest.out;; -esac - - - $ac_path_EGREP_found && break 3 - done -done - -done -IFS=$as_save_IFS - - -fi - -EGREP="$ac_cv_path_EGREP" -if test -z "$EGREP"; then - { { echo "$as_me:$LINENO: error: no acceptable $ac_prog_name could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" >&5 -echo "$as_me: error: no acceptable $ac_prog_name could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" >&2;} - { (exit 1); exit 1; }; } -fi - -else - ac_cv_path_EGREP=$EGREP -fi - - - fi -fi -{ echo "$as_me:$LINENO: result: $ac_cv_path_EGREP" >&5 -echo "${ECHO_T}$ac_cv_path_EGREP" >&6; } - EGREP="$ac_cv_path_EGREP" - - -{ echo "$as_me:$LINENO: checking for fgrep" >&5 -echo $ECHO_N "checking for fgrep... $ECHO_C" >&6; } -if test "${ac_cv_path_FGREP+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for fgrep" >&5 +$as_echo_n "checking for fgrep... 
" >&6; } +if ${ac_cv_path_FGREP+:} false; then : + $as_echo_n "(cached) " >&6 else if echo 'ab*c' | $GREP -F 'ab*c' >/dev/null 2>&1 then ac_cv_path_FGREP="$GREP -F" else - # Extract the first word of "fgrep" to use in msg output -if test -z "$FGREP"; then -set dummy fgrep; ac_prog_name=$2 -if test "${ac_cv_path_FGREP+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 -else + if test -z "$FGREP"; then ac_path_FGREP_found=false -# Loop through the user's path and test for each of PROGNAME-LIST -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR + # Loop through the user's path and test for each of PROGNAME-LIST + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. - for ac_prog in fgrep; do - for ac_exec_ext in '' $ac_executable_extensions; do - ac_path_FGREP="$as_dir/$ac_prog$ac_exec_ext" - { test -f "$ac_path_FGREP" && $as_test_x "$ac_path_FGREP"; } || continue - # Check for GNU ac_path_FGREP and select it if it is found. + for ac_prog in fgrep; do + for ac_exec_ext in '' $ac_executable_extensions; do + ac_path_FGREP="$as_dir/$ac_prog$ac_exec_ext" + as_fn_executable_p "$ac_path_FGREP" || continue +# Check for GNU ac_path_FGREP and select it if it is found. 
# Check for GNU $ac_path_FGREP case `"$ac_path_FGREP" --version 2>&1` in *GNU*) ac_cv_path_FGREP="$ac_path_FGREP" ac_path_FGREP_found=:;; *) ac_count=0 - echo $ECHO_N "0123456789$ECHO_C" >"conftest.in" + $as_echo_n 0123456789 >"conftest.in" while : do cat "conftest.in" "conftest.in" >"conftest.tmp" mv "conftest.tmp" "conftest.in" cp "conftest.in" "conftest.nl" - echo 'FGREP' >> "conftest.nl" + $as_echo 'FGREP' >> "conftest.nl" "$ac_path_FGREP" FGREP < "conftest.nl" >"conftest.out" 2>/dev/null || break diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break - ac_count=`expr $ac_count + 1` + as_fn_arith $ac_count + 1 && ac_count=$as_val if test $ac_count -gt ${ac_path_FGREP_max-0}; then # Best one so far, save it but keep looking for a better one ac_cv_path_FGREP="$ac_path_FGREP" @@ -4065,33 +5732,22 @@ rm -f conftest.in conftest.tmp conftest.nl conftest.out;; esac - - $ac_path_FGREP_found && break 3 + $ac_path_FGREP_found && break 3 + done + done done -done - -done IFS=$as_save_IFS - - -fi - -FGREP="$ac_cv_path_FGREP" -if test -z "$FGREP"; then - { { echo "$as_me:$LINENO: error: no acceptable $ac_prog_name could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" >&5 -echo "$as_me: error: no acceptable $ac_prog_name could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" >&2;} - { (exit 1); exit 1; }; } -fi - + if test -z "$ac_cv_path_FGREP"; then + as_fn_error $? "no acceptable fgrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 + fi else ac_cv_path_FGREP=$FGREP fi - fi fi -{ echo "$as_me:$LINENO: result: $ac_cv_path_FGREP" >&5 -echo "${ECHO_T}$ac_cv_path_FGREP" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_FGREP" >&5 +$as_echo "$ac_cv_path_FGREP" >&6; } FGREP="$ac_cv_path_FGREP" @@ -4116,7 +5772,7 @@ # Check whether --with-gnu-ld was given. 
-if test "${with_gnu_ld+set}" = set; then +if test "${with_gnu_ld+set}" = set; then : withval=$with_gnu_ld; test "$withval" = no || with_gnu_ld=yes else with_gnu_ld=no @@ -4125,8 +5781,8 @@ ac_prog=ld if test "$GCC" = yes; then # Check if gcc -print-prog-name=ld gives a path. - { echo "$as_me:$LINENO: checking for ld used by $CC" >&5 -echo $ECHO_N "checking for ld used by $CC... $ECHO_C" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ld used by $CC" >&5 +$as_echo_n "checking for ld used by $CC... " >&6; } case $host in *-*-mingw*) # gcc leaves a trailing carriage return which upsets mingw @@ -4155,14 +5811,14 @@ ;; esac elif test "$with_gnu_ld" = yes; then - { echo "$as_me:$LINENO: checking for GNU ld" >&5 -echo $ECHO_N "checking for GNU ld... $ECHO_C" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU ld" >&5 +$as_echo_n "checking for GNU ld... " >&6; } else - { echo "$as_me:$LINENO: checking for non-GNU ld" >&5 -echo $ECHO_N "checking for non-GNU ld... $ECHO_C" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for non-GNU ld" >&5 +$as_echo_n "checking for non-GNU ld... 
" >&6; } fi -if test "${lt_cv_path_LD+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +if ${lt_cv_path_LD+:} false; then : + $as_echo_n "(cached) " >&6 else if test -z "$LD"; then lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR @@ -4192,19 +5848,17 @@ LD="$lt_cv_path_LD" if test -n "$LD"; then - { echo "$as_me:$LINENO: result: $LD" >&5 -echo "${ECHO_T}$LD" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LD" >&5 +$as_echo "$LD" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi -test -z "$LD" && { { echo "$as_me:$LINENO: error: no acceptable ld found in \$PATH" >&5 -echo "$as_me: error: no acceptable ld found in \$PATH" >&2;} - { (exit 1); exit 1; }; } -{ echo "$as_me:$LINENO: checking if the linker ($LD) is GNU ld" >&5 -echo $ECHO_N "checking if the linker ($LD) is GNU ld... $ECHO_C" >&6; } -if test "${lt_cv_prog_gnu_ld+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +test -z "$LD" && as_fn_error $? "no acceptable ld found in \$PATH" "$LINENO" 5 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if the linker ($LD) is GNU ld" >&5 +$as_echo_n "checking if the linker ($LD) is GNU ld... " >&6; } +if ${lt_cv_prog_gnu_ld+:} false; then : + $as_echo_n "(cached) " >&6 else # I'd rather use --version here, but apparently some GNU lds only accept -v. case `$LD -v 2>&1 &5 -echo "${ECHO_T}$lt_cv_prog_gnu_ld" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_gnu_ld" >&5 +$as_echo "$lt_cv_prog_gnu_ld" >&6; } with_gnu_ld=$lt_cv_prog_gnu_ld @@ -4228,10 +5882,10 @@ -{ echo "$as_me:$LINENO: checking for BSD- or MS-compatible name lister (nm)" >&5 -echo $ECHO_N "checking for BSD- or MS-compatible name lister (nm)... 
$ECHO_C" >&6; } -if test "${lt_cv_path_NM+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for BSD- or MS-compatible name lister (nm)" >&5 +$as_echo_n "checking for BSD- or MS-compatible name lister (nm)... " >&6; } +if ${lt_cv_path_NM+:} false; then : + $as_echo_n "(cached) " >&6 else if test -n "$NM"; then # Let the user override the test. @@ -4277,8 +5931,8 @@ : ${lt_cv_path_NM=no} fi fi -{ echo "$as_me:$LINENO: result: $lt_cv_path_NM" >&5 -echo "${ECHO_T}$lt_cv_path_NM" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_NM" >&5 +$as_echo "$lt_cv_path_NM" >&6; } if test "$lt_cv_path_NM" != "no"; then NM="$lt_cv_path_NM" else @@ -4291,10 +5945,10 @@ do # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. set dummy $ac_tool_prefix$ac_prog; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_DUMPBIN+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_DUMPBIN+:} false; then : + $as_echo_n "(cached) " >&6 else if test -n "$DUMPBIN"; then ac_cv_prog_DUMPBIN="$DUMPBIN" # Let the user override the test. @@ -4304,25 +5958,25 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_DUMPBIN="$ac_tool_prefix$ac_prog" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS fi fi DUMPBIN=$ac_cv_prog_DUMPBIN if test -n "$DUMPBIN"; then - { echo "$as_me:$LINENO: result: $DUMPBIN" >&5 -echo "${ECHO_T}$DUMPBIN" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DUMPBIN" >&5 +$as_echo "$DUMPBIN" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi @@ -4335,10 +5989,10 @@ do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_ac_ct_DUMPBIN+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_DUMPBIN+:} false; then : + $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_DUMPBIN"; then ac_cv_prog_ac_ct_DUMPBIN="$ac_ct_DUMPBIN" # Let the user override the test. @@ -4348,25 +6002,25 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_DUMPBIN="$ac_prog" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS fi fi ac_ct_DUMPBIN=$ac_cv_prog_ac_ct_DUMPBIN if test -n "$ac_ct_DUMPBIN"; then - { echo "$as_me:$LINENO: result: $ac_ct_DUMPBIN" >&5 -echo "${ECHO_T}$ac_ct_DUMPBIN" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DUMPBIN" >&5 +$as_echo "$ac_ct_DUMPBIN" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi @@ -4378,12 +6032,8 @@ else case $cross_compiling:$ac_tool_warned in yes:) -{ echo "$as_me:$LINENO: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." >&5 -echo "$as_me: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." >&2;} +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac DUMPBIN=$ac_ct_DUMPBIN @@ -4411,10 +6061,10 @@ -{ echo "$as_me:$LINENO: checking the name lister ($NM) interface" >&5 -echo $ECHO_N "checking the name lister ($NM) interface... 
$ECHO_C" >&6; } -if test "${lt_cv_nm_interface+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking the name lister ($NM) interface" >&5 +$as_echo_n "checking the name lister ($NM) interface... " >&6; } +if ${lt_cv_nm_interface+:} false; then : + $as_echo_n "(cached) " >&6 else lt_cv_nm_interface="BSD nm" echo "int some_variable = 0;" > conftest.$ac_ext @@ -4429,27 +6079,27 @@ if $GREP 'External.*some_variable' conftest.out > /dev/null; then lt_cv_nm_interface="MS dumpbin" fi - rm -r -f conftest* + rm -f conftest* fi -{ echo "$as_me:$LINENO: result: $lt_cv_nm_interface" >&5 -echo "${ECHO_T}$lt_cv_nm_interface" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_nm_interface" >&5 +$as_echo "$lt_cv_nm_interface" >&6; } -{ echo "$as_me:$LINENO: checking whether ln -s works" >&5 -echo $ECHO_N "checking whether ln -s works... $ECHO_C" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ln -s works" >&5 +$as_echo_n "checking whether ln -s works... " >&6; } LN_S=$as_ln_s if test "$LN_S" = "ln -s"; then - { echo "$as_me:$LINENO: result: yes" >&5 -echo "${ECHO_T}yes" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } else - { echo "$as_me:$LINENO: result: no, using $LN_S" >&5 -echo "${ECHO_T}no, using $LN_S" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no, using $LN_S" >&5 +$as_echo "no, using $LN_S" >&6; } fi # find the maximum length of command line arguments -{ echo "$as_me:$LINENO: checking the maximum length of command line arguments" >&5 -echo $ECHO_N "checking the maximum length of command line arguments... $ECHO_C" >&6; } -if test "${lt_cv_sys_max_cmd_len+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking the maximum length of command line arguments" >&5 +$as_echo_n "checking the maximum length of command line arguments... 
" >&6; } +if ${lt_cv_sys_max_cmd_len+:} false; then : + $as_echo_n "(cached) " >&6 else i=0 teststring="ABCD" @@ -4571,11 +6221,11 @@ fi if test -n $lt_cv_sys_max_cmd_len ; then - { echo "$as_me:$LINENO: result: $lt_cv_sys_max_cmd_len" >&5 -echo "${ECHO_T}$lt_cv_sys_max_cmd_len" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sys_max_cmd_len" >&5 +$as_echo "$lt_cv_sys_max_cmd_len" >&6; } else - { echo "$as_me:$LINENO: result: none" >&5 -echo "${ECHO_T}none" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: none" >&5 +$as_echo "none" >&6; } fi max_cmd_len=$lt_cv_sys_max_cmd_len @@ -4588,28 +6238,28 @@ : ${MV="mv -f"} : ${RM="rm -f"} -{ echo "$as_me:$LINENO: checking whether the shell understands some XSI constructs" >&5 -echo $ECHO_N "checking whether the shell understands some XSI constructs... $ECHO_C" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands some XSI constructs" >&5 +$as_echo_n "checking whether the shell understands some XSI constructs... " >&6; } # Try some XSI features xsi_shell=no ( _lt_dummy="a/b/c" - test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \ - = c,a/b,, \ + test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \ + = c,a/b,b/c, \ && eval 'test $(( 1 + 1 )) -eq 2 \ && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ && xsi_shell=yes -{ echo "$as_me:$LINENO: result: $xsi_shell" >&5 -echo "${ECHO_T}$xsi_shell" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $xsi_shell" >&5 +$as_echo "$xsi_shell" >&6; } -{ echo "$as_me:$LINENO: checking whether the shell understands \"+=\"" >&5 -echo $ECHO_N "checking whether the shell understands \"+=\"... $ECHO_C" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands \"+=\"" >&5 +$as_echo_n "checking whether the shell understands \"+=\"... 
" >&6; } lt_shell_append=no ( foo=bar; set foo baz; eval "$1+=\$2" && test "$foo" = barbaz ) \ >/dev/null 2>&1 \ && lt_shell_append=yes -{ echo "$as_me:$LINENO: result: $lt_shell_append" >&5 -echo "${ECHO_T}$lt_shell_append" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_shell_append" >&5 +$as_echo "$lt_shell_append" >&6; } if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then @@ -4643,15 +6293,89 @@ -{ echo "$as_me:$LINENO: checking for $LD option to reload object files" >&5 -echo $ECHO_N "checking for $LD option to reload object files... $ECHO_C" >&6; } -if test "${lt_cv_ld_reload_flag+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5 +$as_echo_n "checking how to convert $build file names to $host format... " >&6; } +if ${lt_cv_to_host_file_cmd+:} false; then : + $as_echo_n "(cached) " >&6 +else + case $host in + *-*-mingw* ) + case $build in + *-*-mingw* ) # actually msys + lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32 + ;; + *-*-cygwin* ) + lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32 + ;; + * ) # otherwise, assume *nix + lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32 + ;; + esac + ;; + *-*-cygwin* ) + case $build in + *-*-mingw* ) # actually msys + lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin + ;; + *-*-cygwin* ) + lt_cv_to_host_file_cmd=func_convert_file_noop + ;; + * ) # otherwise, assume *nix + lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin + ;; + esac + ;; + * ) # unhandled hosts (and "normal" native builds) + lt_cv_to_host_file_cmd=func_convert_file_noop + ;; +esac + +fi + +to_host_file_cmd=$lt_cv_to_host_file_cmd +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5 +$as_echo "$lt_cv_to_host_file_cmd" >&6; } + + + + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5 +$as_echo_n "checking how to convert 
$build file names to toolchain format... " >&6; } +if ${lt_cv_to_tool_file_cmd+:} false; then : + $as_echo_n "(cached) " >&6 +else + #assume ordinary cross tools, or native build. +lt_cv_to_tool_file_cmd=func_convert_file_noop +case $host in + *-*-mingw* ) + case $build in + *-*-mingw* ) # actually msys + lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32 + ;; + esac + ;; +esac + +fi + +to_tool_file_cmd=$lt_cv_to_tool_file_cmd +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5 +$as_echo "$lt_cv_to_tool_file_cmd" >&6; } + + + + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5 +$as_echo_n "checking for $LD option to reload object files... " >&6; } +if ${lt_cv_ld_reload_flag+:} false; then : + $as_echo_n "(cached) " >&6 else lt_cv_ld_reload_flag='-r' fi -{ echo "$as_me:$LINENO: result: $lt_cv_ld_reload_flag" >&5 -echo "${ECHO_T}$lt_cv_ld_reload_flag" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_reload_flag" >&5 +$as_echo "$lt_cv_ld_reload_flag" >&6; } reload_flag=$lt_cv_ld_reload_flag case $reload_flag in "" | " "*) ;; @@ -4659,6 +6383,11 @@ esac reload_cmds='$LD$reload_flag -o $output$reload_objs' case $host_os in + cygwin* | mingw* | pw32* | cegcc*) + if test "$GCC" != yes; then + reload_cmds=false + fi + ;; darwin*) if test "$GCC" = yes; then reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs' @@ -4679,10 +6408,10 @@ if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}objdump", so it can be a program name with args. set dummy ${ac_tool_prefix}objdump; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_OBJDUMP+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... 
" >&6; } +if ${ac_cv_prog_OBJDUMP+:} false; then : + $as_echo_n "(cached) " >&6 else if test -n "$OBJDUMP"; then ac_cv_prog_OBJDUMP="$OBJDUMP" # Let the user override the test. @@ -4692,25 +6421,25 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_OBJDUMP="${ac_tool_prefix}objdump" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS fi fi OBJDUMP=$ac_cv_prog_OBJDUMP if test -n "$OBJDUMP"; then - { echo "$as_me:$LINENO: result: $OBJDUMP" >&5 -echo "${ECHO_T}$OBJDUMP" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OBJDUMP" >&5 +$as_echo "$OBJDUMP" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi @@ -4719,10 +6448,10 @@ ac_ct_OBJDUMP=$OBJDUMP # Extract the first word of "objdump", so it can be a program name with args. set dummy objdump; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_ac_ct_OBJDUMP+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_OBJDUMP+:} false; then : + $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_OBJDUMP"; then ac_cv_prog_ac_ct_OBJDUMP="$ac_ct_OBJDUMP" # Let the user override the test. @@ -4732,25 +6461,25 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_OBJDUMP="objdump" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS fi fi ac_ct_OBJDUMP=$ac_cv_prog_ac_ct_OBJDUMP if test -n "$ac_ct_OBJDUMP"; then - { echo "$as_me:$LINENO: result: $ac_ct_OBJDUMP" >&5 -echo "${ECHO_T}$ac_ct_OBJDUMP" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OBJDUMP" >&5 +$as_echo "$ac_ct_OBJDUMP" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi if test "x$ac_ct_OBJDUMP" = x; then @@ -4758,12 +6487,8 @@ else case $cross_compiling:$ac_tool_warned in yes:) -{ echo "$as_me:$LINENO: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." >&5 -echo "$as_me: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." >&2;} +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac OBJDUMP=$ac_ct_OBJDUMP @@ -4779,10 +6504,10 @@ -{ echo "$as_me:$LINENO: checking how to recognize dependent libraries" >&5 -echo $ECHO_N "checking how to recognize dependent libraries... 
$ECHO_C" >&6; } -if test "${lt_cv_deplibs_check_method+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to recognize dependent libraries" >&5 +$as_echo_n "checking how to recognize dependent libraries... " >&6; } +if ${lt_cv_deplibs_check_method+:} false; then : + $as_echo_n "(cached) " >&6 else lt_cv_file_magic_cmd='$MAGIC_CMD' lt_cv_file_magic_test_file= @@ -4981,8 +6706,23 @@ esac fi -{ echo "$as_me:$LINENO: result: $lt_cv_deplibs_check_method" >&5 -echo "${ECHO_T}$lt_cv_deplibs_check_method" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5 +$as_echo "$lt_cv_deplibs_check_method" >&6; } + +file_magic_glob= +want_nocaseglob=no +if test "$build" = "$host"; then + case $host_os in + mingw* | pw32*) + if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then + want_nocaseglob=yes + else + file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"` + fi + ;; + esac +fi + file_magic_cmd=$lt_cv_file_magic_cmd deplibs_check_method=$lt_cv_deplibs_check_method test -z "$deplibs_check_method" && deplibs_check_method=unknown @@ -4998,13 +6738,163 @@ + + + + + + + + + + +if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args. +set dummy ${ac_tool_prefix}dlltool; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_DLLTOOL+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$DLLTOOL"; then + ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +DLLTOOL=$ac_cv_prog_DLLTOOL +if test -n "$DLLTOOL"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5 +$as_echo "$DLLTOOL" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + +fi +if test -z "$ac_cv_prog_DLLTOOL"; then + ac_ct_DLLTOOL=$DLLTOOL + # Extract the first word of "dlltool", so it can be a program name with args. +set dummy dlltool; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_DLLTOOL"; then + ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_DLLTOOL="dlltool" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL +if test -n "$ac_ct_DLLTOOL"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5 +$as_echo "$ac_ct_DLLTOOL" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + if test "x$ac_ct_DLLTOOL" = x; then + DLLTOOL="false" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + DLLTOOL=$ac_ct_DLLTOOL + fi +else + DLLTOOL="$ac_cv_prog_DLLTOOL" +fi + +test -z "$DLLTOOL" && DLLTOOL=dlltool + + + + + + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5 +$as_echo_n "checking how to associate runtime and link libraries... 
" >&6; } +if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_sharedlib_from_linklib_cmd='unknown' + +case $host_os in +cygwin* | mingw* | pw32* | cegcc*) + # two different shell functions defined in ltmain.sh + # decide which to use based on capabilities of $DLLTOOL + case `$DLLTOOL --help 2>&1` in + *--identify-strict*) + lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib + ;; + *) + lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback + ;; + esac + ;; +*) + # fallback: assume linklib IS sharedlib + lt_cv_sharedlib_from_linklib_cmd="$ECHO" + ;; +esac + +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5 +$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; } +sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd +test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO + + + + + + + if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args. -set dummy ${ac_tool_prefix}ar; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_AR+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + for ac_prog in ar + do + # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. +set dummy $ac_tool_prefix$ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_AR+:} false; then : + $as_echo_n "(cached) " >&6 else if test -n "$AR"; then ac_cv_prog_AR="$AR" # Let the user override the test. @@ -5014,37 +6904,41 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_AR="${ac_tool_prefix}ar" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_AR="$ac_tool_prefix$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS fi fi AR=$ac_cv_prog_AR if test -n "$AR"; then - { echo "$as_me:$LINENO: result: $AR" >&5 -echo "${ECHO_T}$AR" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AR" >&5 +$as_echo "$AR" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi + test -n "$AR" && break + done fi -if test -z "$ac_cv_prog_AR"; then +if test -z "$AR"; then ac_ct_AR=$AR - # Extract the first word of "ar", so it can be a program name with args. -set dummy ar; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_ac_ct_AR+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + for ac_prog in ar +do + # Extract the first word of "$ac_prog", so it can be a program name with args. +set dummy $ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_AR+:} false; then : + $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_AR"; then ac_cv_prog_ac_ct_AR="$ac_ct_AR" # Let the user override the test. @@ -5054,52 +6948,108 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_AR="ar" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_AR="$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS fi fi ac_ct_AR=$ac_cv_prog_ac_ct_AR if test -n "$ac_ct_AR"; then - { echo "$as_me:$LINENO: result: $ac_ct_AR" >&5 -echo "${ECHO_T}$ac_ct_AR" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_AR" >&5 +$as_echo "$ac_ct_AR" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi + + test -n "$ac_ct_AR" && break +done + if test "x$ac_ct_AR" = x; then AR="false" else case $cross_compiling:$ac_tool_warned in yes:) -{ echo "$as_me:$LINENO: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." >&5 -echo "$as_me: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." 
>&2;} +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac AR=$ac_ct_AR fi -else - AR="$ac_cv_prog_AR" fi -test -z "$AR" && AR=ar -test -z "$AR_FLAGS" && AR_FLAGS=cru +: ${AR=ar} +: ${AR_FLAGS=cru} + + + + + + + + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5 +$as_echo_n "checking for archiver @FILE support... " >&6; } +if ${lt_cv_ar_at_file+:} false; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_ar_at_file=no + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ + +int +main () +{ + + ; + return 0; +} +_ACEOF +if ac_fn_c_try_compile "$LINENO"; then : + echo conftest.$ac_objext > conftest.lst + lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5' + { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 + (eval $lt_ar_try) 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } + if test "$ac_status" -eq 0; then + # Ensure the archiver fails upon bogus file names. + rm -f conftest.$ac_objext libconftest.a + { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 + (eval $lt_ar_try) 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } + if test "$ac_status" -ne 0; then + lt_cv_ar_at_file=@ + fi + fi + rm -f conftest.* libconftest.a + +fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5 +$as_echo "$lt_cv_ar_at_file" >&6; } +if test "x$lt_cv_ar_at_file" = xno; then + archiver_list_spec= +else + archiver_list_spec=$lt_cv_ar_at_file +fi @@ -5110,10 +7060,10 @@ if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. 
set dummy ${ac_tool_prefix}strip; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_STRIP+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_STRIP+:} false; then : + $as_echo_n "(cached) " >&6 else if test -n "$STRIP"; then ac_cv_prog_STRIP="$STRIP" # Let the user override the test. @@ -5123,25 +7073,25 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_STRIP="${ac_tool_prefix}strip" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS fi fi STRIP=$ac_cv_prog_STRIP if test -n "$STRIP"; then - { echo "$as_me:$LINENO: result: $STRIP" >&5 -echo "${ECHO_T}$STRIP" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5 +$as_echo "$STRIP" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi @@ -5150,10 +7100,10 @@ ac_ct_STRIP=$STRIP # Extract the first word of "strip", so it can be a program name with args. set dummy strip; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_ac_ct_STRIP+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... 
" >&6; } +if ${ac_cv_prog_ac_ct_STRIP+:} false; then : + $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_STRIP"; then ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test. @@ -5163,25 +7113,25 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_STRIP="strip" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS fi fi ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP if test -n "$ac_ct_STRIP"; then - { echo "$as_me:$LINENO: result: $ac_ct_STRIP" >&5 -echo "${ECHO_T}$ac_ct_STRIP" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5 +$as_echo "$ac_ct_STRIP" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi if test "x$ac_ct_STRIP" = x; then @@ -5189,12 +7139,8 @@ else case $cross_compiling:$ac_tool_warned in yes:) -{ echo "$as_me:$LINENO: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." >&5 -echo "$as_me: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." 
>&2;} +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac STRIP=$ac_ct_STRIP @@ -5213,10 +7159,10 @@ if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}ranlib", so it can be a program name with args. set dummy ${ac_tool_prefix}ranlib; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_RANLIB+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_RANLIB+:} false; then : + $as_echo_n "(cached) " >&6 else if test -n "$RANLIB"; then ac_cv_prog_RANLIB="$RANLIB" # Let the user override the test. @@ -5226,25 +7172,25 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_RANLIB="${ac_tool_prefix}ranlib" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS fi fi RANLIB=$ac_cv_prog_RANLIB if test -n "$RANLIB"; then - { echo "$as_me:$LINENO: result: $RANLIB" >&5 -echo "${ECHO_T}$RANLIB" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $RANLIB" >&5 +$as_echo "$RANLIB" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi @@ -5253,10 +7199,10 @@ ac_ct_RANLIB=$RANLIB # Extract the first word of "ranlib", so it can be a program name with 
args. set dummy ranlib; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_ac_ct_RANLIB+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_RANLIB+:} false; then : + $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_RANLIB"; then ac_cv_prog_ac_ct_RANLIB="$ac_ct_RANLIB" # Let the user override the test. @@ -5266,25 +7212,25 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_RANLIB="ranlib" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS fi fi ac_ct_RANLIB=$ac_cv_prog_ac_ct_RANLIB if test -n "$ac_ct_RANLIB"; then - { echo "$as_me:$LINENO: result: $ac_ct_RANLIB" >&5 -echo "${ECHO_T}$ac_ct_RANLIB" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_RANLIB" >&5 +$as_echo "$ac_ct_RANLIB" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi if test "x$ac_ct_RANLIB" = x; then @@ -5292,12 +7238,8 @@ else case $cross_compiling:$ac_tool_warned in yes:) -{ echo "$as_me:$LINENO: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." >&5 -echo "$as_me: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. 
If you think this -configuration is useful to you, please write to autoconf@gnu.org." >&2;} +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac RANLIB=$ac_ct_RANLIB @@ -5386,10 +7328,10 @@ # Check for command to grab the raw symbol name followed by C symbol from nm. -{ echo "$as_me:$LINENO: checking command to parse $NM output from $compiler object" >&5 -echo $ECHO_N "checking command to parse $NM output from $compiler object... $ECHO_C" >&6; } -if test "${lt_cv_sys_global_symbol_pipe+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking command to parse $NM output from $compiler object" >&5 +$as_echo_n "checking command to parse $NM output from $compiler object... " >&6; } +if ${lt_cv_sys_global_symbol_pipe+:} false; then : + $as_echo_n "(cached) " >&6 else # These are sane defaults that work on at least a few old systems. 
@@ -5449,8 +7391,8 @@ lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" # Transform an extracted symbol line into symbol name and symbol address -lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" -lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" +lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" +lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" # Handle CRLF in mingw tool chain opt_cr= @@ -5486,11 +7428,12 @@ else lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" fi + lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'" # Check to see that the pipe works correctly. pipe_works=no - rm -r -f conftest* + rm -f conftest* cat > conftest.$ac_ext <<_LT_EOF #ifdef __cplusplus extern "C" { @@ -5504,18 +7447,18 @@ int main(){nm_test_var='a';nm_test_func();return(0);} _LT_EOF - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); }; then + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then # Now try to grab the symbols. 
nlist=conftest.nm - if { (eval echo "$as_me:$LINENO: \"$NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist\"") >&5 + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist\""; } >&5 (eval $NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist) 2>&5 ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && test -s "$nlist"; then + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && test -s "$nlist"; then # Try sorting and uniquifying the output. if sort "$nlist" | uniq > "$nlist"T; then mv -f "$nlist"T "$nlist" @@ -5527,6 +7470,18 @@ if $GREP ' nm_test_var$' "$nlist" >/dev/null; then if $GREP ' nm_test_func$' "$nlist" >/dev/null; then cat <<_LT_EOF > conftest.$ac_ext +/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */ +#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) +/* DATA imports from DLLs on WIN32 con't be const, because runtime + relocations are performed -- see ld's documentation on pseudo-relocs. */ +# define LT_DLSYM_CONST +#elif defined(__osf__) +/* This system does not cope well with relocations in const data. */ +# define LT_DLSYM_CONST +#else +# define LT_DLSYM_CONST const +#endif + #ifdef __cplusplus extern "C" { #endif @@ -5538,7 +7493,7 @@ cat <<_LT_EOF >> conftest.$ac_ext /* The mapping between symbol names and symbols. */ -const struct { +LT_DLSYM_CONST struct { const char *name; void *address; } @@ -5564,19 +7519,19 @@ _LT_EOF # Now try linking the two files. 
mv conftest.$ac_objext conftstm.$ac_objext - lt_save_LIBS="$LIBS" - lt_save_CFLAGS="$CFLAGS" + lt_globsym_save_LIBS=$LIBS + lt_globsym_save_CFLAGS=$CFLAGS LIBS="conftstm.$ac_objext" CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag" - if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 (eval $ac_link) 2>&5 ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && test -s conftest${ac_exeext}; then + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && test -s conftest${ac_exeext}; then pipe_works=yes fi - LIBS="$lt_save_LIBS" - CFLAGS="$lt_save_CFLAGS" + LIBS=$lt_globsym_save_LIBS + CFLAGS=$lt_globsym_save_CFLAGS else echo "cannot find nm_test_func in $nlist" >&5 fi @@ -5600,18 +7555,35 @@ fi done -fi +fi + +if test -z "$lt_cv_sys_global_symbol_pipe"; then + lt_cv_sys_global_symbol_to_cdecl= +fi +if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: failed" >&5 +$as_echo "failed" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: ok" >&5 +$as_echo "ok" >&6; } +fi + +# Response file support. +if test "$lt_cv_nm_interface" = "MS dumpbin"; then + nm_file_list_spec='@' +elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then + nm_file_list_spec='@' +fi + + + + + + + + + -if test -z "$lt_cv_sys_global_symbol_pipe"; then - lt_cv_sys_global_symbol_to_cdecl= -fi -if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then - { echo "$as_me:$LINENO: result: failed" >&5 -echo "${ECHO_T}failed" >&6; } -else - { echo "$as_me:$LINENO: result: ok" >&5 -echo "${ECHO_T}ok" >&6; } -fi @@ -5629,14 +7601,45 @@ +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5 +$as_echo_n "checking for sysroot... " >&6; } +# Check whether --with-sysroot was given. 
+if test "${with_sysroot+set}" = set; then : + withval=$with_sysroot; +else + with_sysroot=no +fi + + +lt_sysroot= +case ${with_sysroot} in #( + yes) + if test "$GCC" = yes; then + lt_sysroot=`$CC --print-sysroot 2>/dev/null` + fi + ;; #( + /*) + lt_sysroot=`echo "$with_sysroot" | sed -e "$sed_quote_subst"` + ;; #( + no|'') + ;; #( + *) + { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_sysroot}" >&5 +$as_echo "${with_sysroot}" >&6; } + as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5 + ;; +esac + + { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5 +$as_echo "${lt_sysroot:-no}" >&6; } # Check whether --enable-libtool-lock was given. -if test "${enable_libtool_lock+set}" = set; then +if test "${enable_libtool_lock+set}" = set; then : enableval=$enable_libtool_lock; fi @@ -5648,11 +7651,11 @@ ia64-*-hpux*) # Find out which ABI we are using. echo 'int i;' > conftest.$ac_ext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); }; then + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then case `/usr/bin/file conftest.$ac_objext` in *ELF-32*) HPUX_IA64_MODE="32" @@ -5667,11 +7670,11 @@ *-*-irix6*) # Find out which ABI we are using. echo '#line '$LINENO' "configure"' > conftest.$ac_ext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); }; then + $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 + test $ac_status = 0; }; then if test "$lt_cv_prog_gnu_ld" = yes; then case `/usr/bin/file conftest.$ac_objext` in *32-bit*) @@ -5705,11 +7708,11 @@ s390*-*linux*|s390*-*tpf*|sparc*-*linux*) # Find out which ABI we are using. echo 'int i;' > conftest.$ac_ext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); }; then + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then case `/usr/bin/file conftest.o` in *32-bit*) case $host in @@ -5758,10 +7761,10 @@ # On SCO OpenServer 5, we need -belf to get full-featured binaries. SAVE_CFLAGS="$CFLAGS" CFLAGS="$CFLAGS -belf" - { echo "$as_me:$LINENO: checking whether the C compiler needs -belf" >&5 -echo $ECHO_N "checking whether the C compiler needs -belf... $ECHO_C" >&6; } -if test "${lt_cv_cc_needs_belf+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler needs -belf" >&5 +$as_echo_n "checking whether the C compiler needs -belf... " >&6; } +if ${lt_cv_cc_needs_belf+:} false; then : + $as_echo_n "(cached) " >&6 else ac_ext=c ac_cpp='$CPP $CPPFLAGS' @@ -5769,11 +7772,7 @@ ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF + cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int @@ -5784,34 +7783,13 @@ return 0; } _ACEOF -rm -f conftest.$ac_objext conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>conftest.er1 - ac_status=$? 
- grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_c_werror_flag" || - test ! -s conftest.err - } && test -s conftest$ac_exeext && - $as_test_x conftest$ac_exeext; then +if ac_fn_c_try_link "$LINENO"; then : lt_cv_cc_needs_belf=yes else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - lt_cv_cc_needs_belf=no + lt_cv_cc_needs_belf=no fi - -rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ - conftest$ac_exeext conftest.$ac_ext +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' @@ -5819,8 +7797,8 @@ ac_compiler_gnu=$ac_cv_c_compiler_gnu fi -{ echo "$as_me:$LINENO: result: $lt_cv_cc_needs_belf" >&5 -echo "${ECHO_T}$lt_cv_cc_needs_belf" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_cc_needs_belf" >&5 +$as_echo "$lt_cv_cc_needs_belf" >&6; } if test x"$lt_cv_cc_needs_belf" != x"yes"; then # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf CFLAGS="$SAVE_CFLAGS" @@ -5829,11 +7807,11 @@ sparc*-*solaris*) # Find out which ABI we are using. echo 'int i;' > conftest.$ac_ext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); }; then + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then case `/usr/bin/file conftest.o` in *64-bit*) case $lt_cv_prog_gnu_ld in @@ -5853,1074 +7831,588 @@ need_locks="$enable_libtool_lock" - - case $host_os in - rhapsody* | darwin*) - if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}dsymutil", so it can be a program name with args. 
-set dummy ${ac_tool_prefix}dsymutil; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_DSYMUTIL+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args. +set dummy ${ac_tool_prefix}mt; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_MANIFEST_TOOL+:} false; then : + $as_echo_n "(cached) " >&6 else - if test -n "$DSYMUTIL"; then - ac_cv_prog_DSYMUTIL="$DSYMUTIL" # Let the user override the test. + if test -n "$MANIFEST_TOOL"; then + ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_DSYMUTIL="${ac_tool_prefix}dsymutil" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS fi fi -DSYMUTIL=$ac_cv_prog_DSYMUTIL -if test -n "$DSYMUTIL"; then - { echo "$as_me:$LINENO: result: $DSYMUTIL" >&5 -echo "${ECHO_T}$DSYMUTIL" >&6; } +MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL +if test -n "$MANIFEST_TOOL"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5 +$as_echo "$MANIFEST_TOOL" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi fi -if test 
-z "$ac_cv_prog_DSYMUTIL"; then - ac_ct_DSYMUTIL=$DSYMUTIL - # Extract the first word of "dsymutil", so it can be a program name with args. -set dummy dsymutil; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_ac_ct_DSYMUTIL+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +if test -z "$ac_cv_prog_MANIFEST_TOOL"; then + ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL + # Extract the first word of "mt", so it can be a program name with args. +set dummy mt; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then : + $as_echo_n "(cached) " >&6 else - if test -n "$ac_ct_DSYMUTIL"; then - ac_cv_prog_ac_ct_DSYMUTIL="$ac_ct_DSYMUTIL" # Let the user override the test. + if test -n "$ac_ct_MANIFEST_TOOL"; then + ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_DSYMUTIL="dsymutil" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_MANIFEST_TOOL="mt" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS fi fi -ac_ct_DSYMUTIL=$ac_cv_prog_ac_ct_DSYMUTIL -if test -n "$ac_ct_DSYMUTIL"; then - { echo "$as_me:$LINENO: result: $ac_ct_DSYMUTIL" >&5 -echo "${ECHO_T}$ac_ct_DSYMUTIL" >&6; } +ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL +if test -n "$ac_ct_MANIFEST_TOOL"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5 +$as_echo "$ac_ct_MANIFEST_TOOL" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi - if test "x$ac_ct_DSYMUTIL" = x; then - DSYMUTIL=":" + if test "x$ac_ct_MANIFEST_TOOL" = x; then + MANIFEST_TOOL=":" else case $cross_compiling:$ac_tool_warned in yes:) -{ echo "$as_me:$LINENO: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." >&5 -echo "$as_me: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." 
>&2;} +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac - DSYMUTIL=$ac_ct_DSYMUTIL - fi -else - DSYMUTIL="$ac_cv_prog_DSYMUTIL" -fi - - if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}nmedit", so it can be a program name with args. -set dummy ${ac_tool_prefix}nmedit; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_NMEDIT+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 -else - if test -n "$NMEDIT"; then - ac_cv_prog_NMEDIT="$NMEDIT" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_NMEDIT="${ac_tool_prefix}nmedit" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 + MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL fi -done -done -IFS=$as_save_IFS - -fi -fi -NMEDIT=$ac_cv_prog_NMEDIT -if test -n "$NMEDIT"; then - { echo "$as_me:$LINENO: result: $NMEDIT" >&5 -echo "${ECHO_T}$NMEDIT" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL" fi - -fi -if test -z "$ac_cv_prog_NMEDIT"; then - ac_ct_NMEDIT=$NMEDIT - # Extract the first word of "nmedit", so it can be a program name with args. -set dummy nmedit; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... 
$ECHO_C" >&6; } -if test "${ac_cv_prog_ac_ct_NMEDIT+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5 +$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; } +if ${lt_cv_path_mainfest_tool+:} false; then : + $as_echo_n "(cached) " >&6 else - if test -n "$ac_ct_NMEDIT"; then - ac_cv_prog_ac_ct_NMEDIT="$ac_ct_NMEDIT" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_NMEDIT="nmedit" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 + lt_cv_path_mainfest_tool=no + echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5 + $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out + cat conftest.err >&5 + if $GREP 'Manifest Tool' conftest.out > /dev/null; then + lt_cv_path_mainfest_tool=yes fi -done -done -IFS=$as_save_IFS - -fi + rm -f conftest* fi -ac_ct_NMEDIT=$ac_cv_prog_ac_ct_NMEDIT -if test -n "$ac_ct_NMEDIT"; then - { echo "$as_me:$LINENO: result: $ac_ct_NMEDIT" >&5 -echo "${ECHO_T}$ac_ct_NMEDIT" >&6; } -else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } -fi - - if test "x$ac_ct_NMEDIT" = x; then - NMEDIT=":" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ echo "$as_me:$LINENO: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." >&5 -echo "$as_me: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." 
>&2;} -ac_tool_warned=yes ;; -esac - NMEDIT=$ac_ct_NMEDIT - fi -else - NMEDIT="$ac_cv_prog_NMEDIT" +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5 +$as_echo "$lt_cv_path_mainfest_tool" >&6; } +if test "x$lt_cv_path_mainfest_tool" != xyes; then + MANIFEST_TOOL=: fi - if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}lipo", so it can be a program name with args. -set dummy ${ac_tool_prefix}lipo; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_LIPO+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 -else - if test -n "$LIPO"; then - ac_cv_prog_LIPO="$LIPO" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_LIPO="${ac_tool_prefix}lipo" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done -done -IFS=$as_save_IFS - -fi -fi -LIPO=$ac_cv_prog_LIPO -if test -n "$LIPO"; then - { echo "$as_me:$LINENO: result: $LIPO" >&5 -echo "${ECHO_T}$LIPO" >&6; } -else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } -fi -fi -if test -z "$ac_cv_prog_LIPO"; then - ac_ct_LIPO=$LIPO - # Extract the first word of "lipo", so it can be a program name with args. -set dummy lipo; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_ac_ct_LIPO+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 -else - if test -n "$ac_ct_LIPO"; then - ac_cv_prog_ac_ct_LIPO="$ac_ct_LIPO" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_LIPO="lipo" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done -done -IFS=$as_save_IFS -fi -fi -ac_ct_LIPO=$ac_cv_prog_ac_ct_LIPO -if test -n "$ac_ct_LIPO"; then - { echo "$as_me:$LINENO: result: $ac_ct_LIPO" >&5 -echo "${ECHO_T}$ac_ct_LIPO" >&6; } -else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } -fi - if test "x$ac_ct_LIPO" = x; then - LIPO=":" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ echo "$as_me:$LINENO: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." >&5 -echo "$as_me: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." >&2;} -ac_tool_warned=yes ;; -esac - LIPO=$ac_ct_LIPO - fi -else - LIPO="$ac_cv_prog_LIPO" -fi + case $host_os in + rhapsody* | darwin*) if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}otool", so it can be a program name with args. -set dummy ${ac_tool_prefix}otool; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_OTOOL+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + # Extract the first word of "${ac_tool_prefix}dsymutil", so it can be a program name with args. +set dummy ${ac_tool_prefix}dsymutil; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... 
" >&6; } +if ${ac_cv_prog_DSYMUTIL+:} false; then : + $as_echo_n "(cached) " >&6 else - if test -n "$OTOOL"; then - ac_cv_prog_OTOOL="$OTOOL" # Let the user override the test. + if test -n "$DSYMUTIL"; then + ac_cv_prog_DSYMUTIL="$DSYMUTIL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_OTOOL="${ac_tool_prefix}otool" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_DSYMUTIL="${ac_tool_prefix}dsymutil" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS fi fi -OTOOL=$ac_cv_prog_OTOOL -if test -n "$OTOOL"; then - { echo "$as_me:$LINENO: result: $OTOOL" >&5 -echo "${ECHO_T}$OTOOL" >&6; } +DSYMUTIL=$ac_cv_prog_DSYMUTIL +if test -n "$DSYMUTIL"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DSYMUTIL" >&5 +$as_echo "$DSYMUTIL" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi fi -if test -z "$ac_cv_prog_OTOOL"; then - ac_ct_OTOOL=$OTOOL - # Extract the first word of "otool", so it can be a program name with args. -set dummy otool; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_ac_ct_OTOOL+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +if test -z "$ac_cv_prog_DSYMUTIL"; then + ac_ct_DSYMUTIL=$DSYMUTIL + # Extract the first word of "dsymutil", so it can be a program name with args. 
+set dummy dsymutil; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_DSYMUTIL+:} false; then : + $as_echo_n "(cached) " >&6 else - if test -n "$ac_ct_OTOOL"; then - ac_cv_prog_ac_ct_OTOOL="$ac_ct_OTOOL" # Let the user override the test. + if test -n "$ac_ct_DSYMUTIL"; then + ac_cv_prog_ac_ct_DSYMUTIL="$ac_ct_DSYMUTIL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_OTOOL="otool" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_DSYMUTIL="dsymutil" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS fi fi -ac_ct_OTOOL=$ac_cv_prog_ac_ct_OTOOL -if test -n "$ac_ct_OTOOL"; then - { echo "$as_me:$LINENO: result: $ac_ct_OTOOL" >&5 -echo "${ECHO_T}$ac_ct_OTOOL" >&6; } +ac_ct_DSYMUTIL=$ac_cv_prog_ac_ct_DSYMUTIL +if test -n "$ac_ct_DSYMUTIL"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DSYMUTIL" >&5 +$as_echo "$ac_ct_DSYMUTIL" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi - if test "x$ac_ct_OTOOL" = x; then - OTOOL=":" + if test "x$ac_ct_DSYMUTIL" = x; then + DSYMUTIL=":" else case $cross_compiling:$ac_tool_warned in yes:) -{ echo "$as_me:$LINENO: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." 
>&5 -echo "$as_me: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." >&2;} +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac - OTOOL=$ac_ct_OTOOL + DSYMUTIL=$ac_ct_DSYMUTIL fi else - OTOOL="$ac_cv_prog_OTOOL" + DSYMUTIL="$ac_cv_prog_DSYMUTIL" fi if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}otool64", so it can be a program name with args. -set dummy ${ac_tool_prefix}otool64; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_OTOOL64+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + # Extract the first word of "${ac_tool_prefix}nmedit", so it can be a program name with args. +set dummy ${ac_tool_prefix}nmedit; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_NMEDIT+:} false; then : + $as_echo_n "(cached) " >&6 else - if test -n "$OTOOL64"; then - ac_cv_prog_OTOOL64="$OTOOL64" # Let the user override the test. + if test -n "$NMEDIT"; then + ac_cv_prog_NMEDIT="$NMEDIT" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_OTOOL64="${ac_tool_prefix}otool64" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_NMEDIT="${ac_tool_prefix}nmedit" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS fi fi -OTOOL64=$ac_cv_prog_OTOOL64 -if test -n "$OTOOL64"; then - { echo "$as_me:$LINENO: result: $OTOOL64" >&5 -echo "${ECHO_T}$OTOOL64" >&6; } +NMEDIT=$ac_cv_prog_NMEDIT +if test -n "$NMEDIT"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $NMEDIT" >&5 +$as_echo "$NMEDIT" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi fi -if test -z "$ac_cv_prog_OTOOL64"; then - ac_ct_OTOOL64=$OTOOL64 - # Extract the first word of "otool64", so it can be a program name with args. -set dummy otool64; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_ac_ct_OTOOL64+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 -else - if test -n "$ac_ct_OTOOL64"; then - ac_cv_prog_ac_ct_OTOOL64="$ac_ct_OTOOL64" # Let the user override the test. +if test -z "$ac_cv_prog_NMEDIT"; then + ac_ct_NMEDIT=$NMEDIT + # Extract the first word of "nmedit", so it can be a program name with args. +set dummy nmedit; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_NMEDIT+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$ac_ct_NMEDIT"; then + ac_cv_prog_ac_ct_NMEDIT="$ac_ct_NMEDIT" # Let the user override the test. 
else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_OTOOL64="otool64" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_NMEDIT="nmedit" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS fi fi -ac_ct_OTOOL64=$ac_cv_prog_ac_ct_OTOOL64 -if test -n "$ac_ct_OTOOL64"; then - { echo "$as_me:$LINENO: result: $ac_ct_OTOOL64" >&5 -echo "${ECHO_T}$ac_ct_OTOOL64" >&6; } +ac_ct_NMEDIT=$ac_cv_prog_ac_ct_NMEDIT +if test -n "$ac_ct_NMEDIT"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_NMEDIT" >&5 +$as_echo "$ac_ct_NMEDIT" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi - if test "x$ac_ct_OTOOL64" = x; then - OTOOL64=":" + if test "x$ac_ct_NMEDIT" = x; then + NMEDIT=":" else case $cross_compiling:$ac_tool_warned in yes:) -{ echo "$as_me:$LINENO: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." >&5 -echo "$as_me: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." 
>&2;} +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac - OTOOL64=$ac_ct_OTOOL64 + NMEDIT=$ac_ct_NMEDIT fi else - OTOOL64="$ac_cv_prog_OTOOL64" + NMEDIT="$ac_cv_prog_NMEDIT" fi - - - - - - - - - - - - - - - - - - - - - - - - - - - { echo "$as_me:$LINENO: checking for -single_module linker flag" >&5 -echo $ECHO_N "checking for -single_module linker flag... $ECHO_C" >&6; } -if test "${lt_cv_apple_cc_single_mod+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 -else - lt_cv_apple_cc_single_mod=no - if test -z "${LT_MULTI_MODULE}"; then - # By default we will add the -single_module flag. You can override - # by either setting the environment variable LT_MULTI_MODULE - # non-empty at configure time, or by adding -multi_module to the - # link flags. - rm -rf libconftest.dylib* - echo "int foo(void){return 1;}" > conftest.c - echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ --dynamiclib -Wl,-single_module conftest.c" >&5 - $LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ - -dynamiclib -Wl,-single_module conftest.c 2>conftest.err - _lt_result=$? - if test -f libconftest.dylib && test ! -s conftest.err && test $_lt_result = 0; then - lt_cv_apple_cc_single_mod=yes - else - cat conftest.err >&5 - fi - rm -rf libconftest.dylib* - rm -f conftest.* - fi -fi -{ echo "$as_me:$LINENO: result: $lt_cv_apple_cc_single_mod" >&5 -echo "${ECHO_T}$lt_cv_apple_cc_single_mod" >&6; } - { echo "$as_me:$LINENO: checking for -exported_symbols_list linker flag" >&5 -echo $ECHO_N "checking for -exported_symbols_list linker flag... $ECHO_C" >&6; } -if test "${lt_cv_ld_exported_symbols_list+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}lipo", so it can be a program name with args. 
+set dummy ${ac_tool_prefix}lipo; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_LIPO+:} false; then : + $as_echo_n "(cached) " >&6 else - lt_cv_ld_exported_symbols_list=no - save_LDFLAGS=$LDFLAGS - echo "_main" > conftest.sym - LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym" - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -rm -f conftest.$ac_objext conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_c_werror_flag" || - test ! -s conftest.err - } && test -s conftest$ac_exeext && - $as_test_x conftest$ac_exeext; then - lt_cv_ld_exported_symbols_list=yes + if test -n "$LIPO"; then + ac_cv_prog_LIPO="$LIPO" # Let the user override the test. else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_LIPO="${ac_tool_prefix}lipo" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS - lt_cv_ld_exported_symbols_list=no fi - -rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ - conftest$ac_exeext conftest.$ac_ext - LDFLAGS="$save_LDFLAGS" - fi -{ echo "$as_me:$LINENO: result: $lt_cv_ld_exported_symbols_list" >&5 -echo "${ECHO_T}$lt_cv_ld_exported_symbols_list" >&6; } - { echo "$as_me:$LINENO: checking for -force_load linker flag" >&5 -echo $ECHO_N "checking for -force_load linker flag... $ECHO_C" >&6; } -if test "${lt_cv_ld_force_load+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +LIPO=$ac_cv_prog_LIPO +if test -n "$LIPO"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIPO" >&5 +$as_echo "$LIPO" >&6; } else - lt_cv_ld_force_load=no - cat > conftest.c << _LT_EOF -int forced_loaded() { return 2;} -_LT_EOF - echo "$LTCC $LTCFLAGS -c -o conftest.o conftest.c" >&5 - $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5 - echo "$AR cru libconftest.a conftest.o" >&5 - $AR cru libconftest.a conftest.o 2>&5 - echo "$RANLIB libconftest.a" >&5 - $RANLIB libconftest.a 2>&5 - cat > conftest.c << _LT_EOF -int main() { return 0;} -_LT_EOF - echo "$LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a" >&5 - $LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a 2>conftest.err - _lt_result=$? - if test -f conftest && test ! 
-s conftest.err && test $_lt_result = 0 && $GREP forced_load conftest 2>&1 >/dev/null; then - lt_cv_ld_force_load=yes - else - cat conftest.err >&5 - fi - rm -f conftest.err libconftest.a conftest conftest.c - rm -rf conftest.dSYM - + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi -{ echo "$as_me:$LINENO: result: $lt_cv_ld_force_load" >&5 -echo "${ECHO_T}$lt_cv_ld_force_load" >&6; } - case $host_os in - rhapsody* | darwin1.[012]) - _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;; - darwin1.*) - _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; - darwin*) # darwin 5.x on - # if running on 10.5 or later, the deployment target defaults - # to the OS version, if on x86, and 10.4, the deployment - # target defaults to 10.4. Don't you love it? - case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in - 10.0,*86*-darwin8*|10.0,*-darwin[91]*) - _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; - 10.[012]*) - _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; - 10.*) - _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; - esac - ;; - esac - if test "$lt_cv_apple_cc_single_mod" = "yes"; then - _lt_dar_single_mod='$single_module' - fi - if test "$lt_cv_ld_exported_symbols_list" = "yes"; then - _lt_dar_export_syms=' ${wl}-exported_symbols_list,$output_objdir/${libname}-symbols.expsym' - else - _lt_dar_export_syms='~$NMEDIT -s $output_objdir/${libname}-symbols.expsym ${lib}' - fi - if test "$DSYMUTIL" != ":" && test "$lt_cv_ld_force_load" = "no"; then - _lt_dsymutil='~$DSYMUTIL $lib || :' - else - _lt_dsymutil= - fi - ;; - esac -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu -{ echo "$as_me:$LINENO: checking how to run the C preprocessor" >&5 -echo $ECHO_N "checking how to run the C 
preprocessor... $ECHO_C" >&6; } -# On Suns, sometimes $CPP names a directory. -if test -n "$CPP" && test -d "$CPP"; then - CPP= -fi -if test -z "$CPP"; then - if test "${ac_cv_prog_CPP+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 -else - # Double quotes because CPP needs to be expanded - for CPP in "$CC -E" "$CC -E -traditional-cpp" "/lib/cpp" - do - ac_preproc_ok=false -for ac_c_preproc_warn_flag in '' yes -do - # Use a header file that comes with gcc, so configuring glibc - # with a fresh cross-compiler works. - # Prefer to if __STDC__ is defined, since - # exists even on freestanding compilers. - # On the NeXT, cc -E runs the code through the compiler's parser, - # not just through cpp. "Syntax error" is here to catch this case. - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF -/* end confdefs.h. */ -#ifdef __STDC__ -# include -#else -# include -#endif - Syntax error -_ACEOF -if { (ac_try="$ac_cpp conftest.$ac_ext" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_cpp conftest.$ac_ext") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } >/dev/null && { - test -z "$ac_c_preproc_warn_flag$ac_c_werror_flag" || - test ! -s conftest.err - }; then - : -else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - # Broken: fails on valid input. -continue fi - -rm -f conftest.err conftest.$ac_ext - - # OK, works on sane cases. Now check whether nonexistent headers - # can be detected and how. - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF -/* end confdefs.h. 
*/ -#include -_ACEOF -if { (ac_try="$ac_cpp conftest.$ac_ext" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_cpp conftest.$ac_ext") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } >/dev/null && { - test -z "$ac_c_preproc_warn_flag$ac_c_werror_flag" || - test ! -s conftest.err - }; then - # Broken: success on invalid input. -continue +if test -z "$ac_cv_prog_LIPO"; then + ac_ct_LIPO=$LIPO + # Extract the first word of "lipo", so it can be a program name with args. +set dummy lipo; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_LIPO+:} false; then : + $as_echo_n "(cached) " >&6 else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - # Passes both tests. -ac_preproc_ok=: -break -fi - -rm -f conftest.err conftest.$ac_ext - + if test -n "$ac_ct_LIPO"; then + ac_cv_prog_ac_ct_LIPO="$ac_ct_LIPO" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_LIPO="lipo" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi done -# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. 
-rm -f conftest.err conftest.$ac_ext -if $ac_preproc_ok; then - break -fi - - done - ac_cv_prog_CPP=$CPP + done +IFS=$as_save_IFS fi - CPP=$ac_cv_prog_CPP -else - ac_cv_prog_CPP=$CPP fi -{ echo "$as_me:$LINENO: result: $CPP" >&5 -echo "${ECHO_T}$CPP" >&6; } -ac_preproc_ok=false -for ac_c_preproc_warn_flag in '' yes -do - # Use a header file that comes with gcc, so configuring glibc - # with a fresh cross-compiler works. - # Prefer to if __STDC__ is defined, since - # exists even on freestanding compilers. - # On the NeXT, cc -E runs the code through the compiler's parser, - # not just through cpp. "Syntax error" is here to catch this case. - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF -/* end confdefs.h. */ -#ifdef __STDC__ -# include -#else -# include -#endif - Syntax error -_ACEOF -if { (ac_try="$ac_cpp conftest.$ac_ext" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_cpp conftest.$ac_ext") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } >/dev/null && { - test -z "$ac_c_preproc_warn_flag$ac_c_werror_flag" || - test ! -s conftest.err - }; then - : +ac_ct_LIPO=$ac_cv_prog_ac_ct_LIPO +if test -n "$ac_ct_LIPO"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_LIPO" >&5 +$as_echo "$ac_ct_LIPO" >&6; } else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - # Broken: fails on valid input. -continue + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi -rm -f conftest.err conftest.$ac_ext - - # OK, works on sane cases. Now check whether nonexistent headers - # can be detected and how. - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. 
*/ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF -/* end confdefs.h. */ -#include -_ACEOF -if { (ac_try="$ac_cpp conftest.$ac_ext" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; + if test "x$ac_ct_LIPO" = x; then + LIPO=":" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_cpp conftest.$ac_ext") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } >/dev/null && { - test -z "$ac_c_preproc_warn_flag$ac_c_werror_flag" || - test ! -s conftest.err - }; then - # Broken: success on invalid input. -continue + LIPO=$ac_ct_LIPO + fi else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - # Passes both tests. -ac_preproc_ok=: -break + LIPO="$ac_cv_prog_LIPO" fi -rm -f conftest.err conftest.$ac_ext - + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}otool", so it can be a program name with args. +set dummy ${ac_tool_prefix}otool; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_OTOOL+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$OTOOL"; then + ac_cv_prog_OTOOL="$OTOOL" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_OTOOL="${ac_tool_prefix}otool" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi done -# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. -rm -f conftest.err conftest.$ac_ext -if $ac_preproc_ok; then - : + done +IFS=$as_save_IFS + +fi +fi +OTOOL=$ac_cv_prog_OTOOL +if test -n "$OTOOL"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL" >&5 +$as_echo "$OTOOL" >&6; } else - { { echo "$as_me:$LINENO: error: C preprocessor \"$CPP\" fails sanity check -See \`config.log' for more details." >&5 -echo "$as_me: error: C preprocessor \"$CPP\" fails sanity check -See \`config.log' for more details." >&2;} - { (exit 1); exit 1; }; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - -{ echo "$as_me:$LINENO: checking for ANSI C header files" >&5 -echo $ECHO_N "checking for ANSI C header files... $ECHO_C" >&6; } -if test "${ac_cv_header_stdc+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +fi +if test -z "$ac_cv_prog_OTOOL"; then + ac_ct_OTOOL=$OTOOL + # Extract the first word of "otool", so it can be a program name with args. +set dummy otool; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_OTOOL+:} false; then : + $as_echo_n "(cached) " >&6 else - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF -/* end confdefs.h. 
*/ -#include -#include -#include -#include - -int -main () -{ - - ; - return 0; -} -_ACEOF -rm -f conftest.$ac_objext -if { (ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compile") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_c_werror_flag" || - test ! -s conftest.err - } && test -s conftest.$ac_objext; then - ac_cv_header_stdc=yes + if test -n "$ac_ct_OTOOL"; then + ac_cv_prog_ac_ct_OTOOL="$ac_ct_OTOOL" # Let the user override the test. else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_OTOOL="otool" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS - ac_cv_header_stdc=no fi - -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext - -if test $ac_cv_header_stdc = yes; then - # SunOS 4.x string.h does not declare mem*, contrary to ANSI. - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF -/* end confdefs.h. 
*/ -#include - -_ACEOF -if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | - $EGREP "memchr" >/dev/null 2>&1; then - : +fi +ac_ct_OTOOL=$ac_cv_prog_ac_ct_OTOOL +if test -n "$ac_ct_OTOOL"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL" >&5 +$as_echo "$ac_ct_OTOOL" >&6; } else - ac_cv_header_stdc=no + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi -rm -f -r conftest* + if test "x$ac_ct_OTOOL" = x; then + OTOOL=":" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + OTOOL=$ac_ct_OTOOL + fi +else + OTOOL="$ac_cv_prog_OTOOL" fi -if test $ac_cv_header_stdc = yes; then - # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI. - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF -/* end confdefs.h. */ -#include - -_ACEOF -if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | - $EGREP "free" >/dev/null 2>&1; then - : + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}otool64", so it can be a program name with args. +set dummy ${ac_tool_prefix}otool64; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_OTOOL64+:} false; then : + $as_echo_n "(cached) " >&6 else - ac_cv_header_stdc=no -fi -rm -f -r conftest* + if test -n "$OTOOL64"; then + ac_cv_prog_OTOOL64="$OTOOL64" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_OTOOL64="${ac_tool_prefix}otool64" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS fi - -if test $ac_cv_header_stdc = yes; then - # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi. - if test "$cross_compiling" = yes; then - : +fi +OTOOL64=$ac_cv_prog_OTOOL64 +if test -n "$OTOOL64"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL64" >&5 +$as_echo "$OTOOL64" >&6; } else - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF -/* end confdefs.h. */ -#include -#include -#if ((' ' & 0x0FF) == 0x020) -# define ISLOWER(c) ('a' <= (c) && (c) <= 'z') -# define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c)) -#else -# define ISLOWER(c) \ - (('a' <= (c) && (c) <= 'i') \ - || ('j' <= (c) && (c) <= 'r') \ - || ('s' <= (c) && (c) <= 'z')) -# define TOUPPER(c) (ISLOWER(c) ? ((c) | 0x40) : (c)) -#endif + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi -#define XOR(e, f) (((e) && !(f)) || (!(e) && (f))) -int -main () -{ - int i; - for (i = 0; i < 256; i++) - if (XOR (islower (i), ISLOWER (i)) - || toupper (i) != TOUPPER (i)) - return 2; - return 0; -} -_ACEOF -rm -f conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { ac_try='./conftest$ac_exeext' - { (case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 - (exit $ac_status); }; }; then - : + +fi +if test -z "$ac_cv_prog_OTOOL64"; then + ac_ct_OTOOL64=$OTOOL64 + # Extract the first word of "otool64", so it can be a program name with args. +set dummy otool64; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_OTOOL64+:} false; then : + $as_echo_n "(cached) " >&6 else - echo "$as_me: program exited with status $ac_status" >&5 -echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 + if test -n "$ac_ct_OTOOL64"; then + ac_cv_prog_ac_ct_OTOOL64="$ac_ct_OTOOL64" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_ac_ct_OTOOL64="otool64" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS -( exit $ac_status ) -ac_cv_header_stdc=no fi -rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext conftest.$ac_objext conftest.$ac_ext fi - - +ac_ct_OTOOL64=$ac_cv_prog_ac_ct_OTOOL64 +if test -n "$ac_ct_OTOOL64"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL64" >&5 +$as_echo "$ac_ct_OTOOL64" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi + + if test "x$ac_ct_OTOOL64" = x; then + OTOOL64=":" + else + case $cross_compiling:$ac_tool_warned in +yes:) +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} +ac_tool_warned=yes ;; +esac + OTOOL64=$ac_ct_OTOOL64 + fi +else + OTOOL64="$ac_cv_prog_OTOOL64" fi -{ echo "$as_me:$LINENO: result: $ac_cv_header_stdc" >&5 -echo 
"${ECHO_T}$ac_cv_header_stdc" >&6; } -if test $ac_cv_header_stdc = yes; then -cat >>confdefs.h <<\_ACEOF -#define STDC_HEADERS 1 -_ACEOF -fi -# On IRIX 5.3, sys/types and inttypes.h are conflicting. @@ -6930,117 +8422,156 @@ -for ac_header in sys/types.h sys/stat.h stdlib.h string.h memory.h strings.h \ - inttypes.h stdint.h unistd.h -do -as_ac_Header=`echo "ac_cv_header_$ac_header" | $as_tr_sh` -{ echo "$as_me:$LINENO: checking for $ac_header" >&5 -echo $ECHO_N "checking for $ac_header... $ECHO_C" >&6; } -if { as_var=$as_ac_Header; eval "test \"\${$as_var+set}\" = set"; }; then - echo $ECHO_N "(cached) $ECHO_C" >&6 -else - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF -/* end confdefs.h. */ -$ac_includes_default -#include <$ac_header> -_ACEOF -rm -f conftest.$ac_objext -if { (ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compile") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_c_werror_flag" || - test ! 
-s conftest.err - } && test -s conftest.$ac_objext; then - eval "$as_ac_Header=yes" -else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - eval "$as_ac_Header=no" -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -ac_res=`eval echo '${'$as_ac_Header'}'` - { echo "$as_me:$LINENO: result: $ac_res" >&5 -echo "${ECHO_T}$ac_res" >&6; } -if test `eval echo '${'$as_ac_Header'}'` = yes; then - cat >>confdefs.h <<_ACEOF -#define `echo "HAVE_$ac_header" | $as_tr_cpp` 1 -_ACEOF -fi -done -for ac_header in dlfcn.h -do -as_ac_Header=`echo "ac_cv_header_$ac_header" | $as_tr_sh` -{ echo "$as_me:$LINENO: checking for $ac_header" >&5 -echo $ECHO_N "checking for $ac_header... $ECHO_C" >&6; } -if { as_var=$as_ac_Header; eval "test \"\${$as_var+set}\" = set"; }; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + + + + + + + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -single_module linker flag" >&5 +$as_echo_n "checking for -single_module linker flag... " >&6; } +if ${lt_cv_apple_cc_single_mod+:} false; then : + $as_echo_n "(cached) " >&6 else - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF + lt_cv_apple_cc_single_mod=no + if test -z "${LT_MULTI_MODULE}"; then + # By default we will add the -single_module flag. You can override + # by either setting the environment variable LT_MULTI_MODULE + # non-empty at configure time, or by adding -multi_module to the + # link flags. + rm -rf libconftest.dylib* + echo "int foo(void){return 1;}" > conftest.c + echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ +-dynamiclib -Wl,-single_module conftest.c" >&5 + $LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ + -dynamiclib -Wl,-single_module conftest.c 2>conftest.err + _lt_result=$? + if test -f libconftest.dylib && test ! 
-s conftest.err && test $_lt_result = 0; then + lt_cv_apple_cc_single_mod=yes + else + cat conftest.err >&5 + fi + rm -rf libconftest.dylib* + rm -f conftest.* + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_apple_cc_single_mod" >&5 +$as_echo "$lt_cv_apple_cc_single_mod" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -exported_symbols_list linker flag" >&5 +$as_echo_n "checking for -exported_symbols_list linker flag... " >&6; } +if ${lt_cv_ld_exported_symbols_list+:} false; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_ld_exported_symbols_list=no + save_LDFLAGS=$LDFLAGS + echo "_main" > conftest.sym + LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ -$ac_includes_default -#include <$ac_header> +int +main () +{ + + ; + return 0; +} _ACEOF -rm -f conftest.$ac_objext -if { (ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compile") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_c_werror_flag" || - test ! 
-s conftest.err - } && test -s conftest.$ac_objext; then - eval "$as_ac_Header=yes" +if ac_fn_c_try_link "$LINENO"; then : + lt_cv_ld_exported_symbols_list=yes else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 + lt_cv_ld_exported_symbols_list=no +fi +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext + LDFLAGS="$save_LDFLAGS" - eval "$as_ac_Header=no" fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_exported_symbols_list" >&5 +$as_echo "$lt_cv_ld_exported_symbols_list" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -force_load linker flag" >&5 +$as_echo_n "checking for -force_load linker flag... " >&6; } +if ${lt_cv_ld_force_load+:} false; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_ld_force_load=no + cat > conftest.c << _LT_EOF +int forced_loaded() { return 2;} +_LT_EOF + echo "$LTCC $LTCFLAGS -c -o conftest.o conftest.c" >&5 + $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5 + echo "$AR cru libconftest.a conftest.o" >&5 + $AR cru libconftest.a conftest.o 2>&5 + echo "$RANLIB libconftest.a" >&5 + $RANLIB libconftest.a 2>&5 + cat > conftest.c << _LT_EOF +int main() { return 0;} +_LT_EOF + echo "$LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a" >&5 + $LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a 2>conftest.err + _lt_result=$? + if test -f conftest && test ! 
-s conftest.err && test $_lt_result = 0 && $GREP forced_load conftest 2>&1 >/dev/null; then + lt_cv_ld_force_load=yes + else + cat conftest.err >&5 + fi + rm -f conftest.err libconftest.a conftest conftest.c + rm -rf conftest.dSYM -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi -ac_res=`eval echo '${'$as_ac_Header'}'` - { echo "$as_me:$LINENO: result: $ac_res" >&5 -echo "${ECHO_T}$ac_res" >&6; } -if test `eval echo '${'$as_ac_Header'}'` = yes; then +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_force_load" >&5 +$as_echo "$lt_cv_ld_force_load" >&6; } + case $host_os in + rhapsody* | darwin1.[012]) + _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;; + darwin1.*) + _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; + darwin*) # darwin 5.x on + # if running on 10.5 or later, the deployment target defaults + # to the OS version, if on x86, and 10.4, the deployment + # target defaults to 10.4. Don't you love it? + case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in + 10.0,*86*-darwin8*|10.0,*-darwin[91]*) + _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; + 10.[012]*) + _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; + 10.*) + _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; + esac + ;; + esac + if test "$lt_cv_apple_cc_single_mod" = "yes"; then + _lt_dar_single_mod='$single_module' + fi + if test "$lt_cv_ld_exported_symbols_list" = "yes"; then + _lt_dar_export_syms=' ${wl}-exported_symbols_list,$output_objdir/${libname}-symbols.expsym' + else + _lt_dar_export_syms='~$NMEDIT -s $output_objdir/${libname}-symbols.expsym ${lib}' + fi + if test "$DSYMUTIL" != ":" && test "$lt_cv_ld_force_load" = "no"; then + _lt_dsymutil='~$DSYMUTIL $lib || :' + else + _lt_dsymutil= + fi + ;; + esac + +for ac_header in dlfcn.h +do : + ac_fn_c_check_header_compile "$LINENO" "dlfcn.h" "ac_cv_header_dlfcn_h" "$ac_includes_default +" +if test "x$ac_cv_header_dlfcn_h" = 
xyes; then : cat >>confdefs.h <<_ACEOF -#define `echo "HAVE_$ac_header" | $as_tr_cpp` 1 +#define HAVE_DLFCN_H 1 _ACEOF fi @@ -7059,10 +8590,10 @@ if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}as", so it can be a program name with args. set dummy ${ac_tool_prefix}as; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_AS+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_AS+:} false; then : + $as_echo_n "(cached) " >&6 else if test -n "$AS"; then ac_cv_prog_AS="$AS" # Let the user override the test. @@ -7072,25 +8603,25 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_AS="${ac_tool_prefix}as" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS fi fi AS=$ac_cv_prog_AS if test -n "$AS"; then - { echo "$as_me:$LINENO: result: $AS" >&5 -echo "${ECHO_T}$AS" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AS" >&5 +$as_echo "$AS" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi @@ -7099,10 +8630,10 @@ ac_ct_AS=$AS # Extract the first word of "as", so it can be a program name with args. set dummy as; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... 
$ECHO_C" >&6; } -if test "${ac_cv_prog_ac_ct_AS+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_AS+:} false; then : + $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_AS"; then ac_cv_prog_ac_ct_AS="$ac_ct_AS" # Let the user override the test. @@ -7112,25 +8643,25 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_AS="as" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS fi fi ac_ct_AS=$ac_cv_prog_ac_ct_AS if test -n "$ac_ct_AS"; then - { echo "$as_me:$LINENO: result: $ac_ct_AS" >&5 -echo "${ECHO_T}$ac_ct_AS" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_AS" >&5 +$as_echo "$ac_ct_AS" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi if test "x$ac_ct_AS" = x; then @@ -7138,12 +8669,8 @@ else case $cross_compiling:$ac_tool_warned in yes:) -{ echo "$as_me:$LINENO: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." >&5 -echo "$as_me: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." 
>&2;} +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac AS=$ac_ct_AS @@ -7155,10 +8682,10 @@ if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args. set dummy ${ac_tool_prefix}dlltool; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_DLLTOOL+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_DLLTOOL+:} false; then : + $as_echo_n "(cached) " >&6 else if test -n "$DLLTOOL"; then ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test. @@ -7168,25 +8695,25 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS fi fi DLLTOOL=$ac_cv_prog_DLLTOOL if test -n "$DLLTOOL"; then - { echo "$as_me:$LINENO: result: $DLLTOOL" >&5 -echo "${ECHO_T}$DLLTOOL" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5 +$as_echo "$DLLTOOL" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi @@ -7195,10 +8722,10 @@ ac_ct_DLLTOOL=$DLLTOOL # Extract the first word of "dlltool", so it can be a program 
name with args. set dummy dlltool; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_ac_ct_DLLTOOL+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then : + $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_DLLTOOL"; then ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test. @@ -7208,25 +8735,25 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_DLLTOOL="dlltool" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS fi fi ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL if test -n "$ac_ct_DLLTOOL"; then - { echo "$as_me:$LINENO: result: $ac_ct_DLLTOOL" >&5 -echo "${ECHO_T}$ac_ct_DLLTOOL" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5 +$as_echo "$ac_ct_DLLTOOL" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi if test "x$ac_ct_DLLTOOL" = x; then @@ -7234,12 +8761,8 @@ else case $cross_compiling:$ac_tool_warned in yes:) -{ echo "$as_me:$LINENO: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." 
>&5 -echo "$as_me: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." >&2;} +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac DLLTOOL=$ac_ct_DLLTOOL @@ -7251,10 +8774,10 @@ if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}objdump", so it can be a program name with args. set dummy ${ac_tool_prefix}objdump; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_OBJDUMP+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_OBJDUMP+:} false; then : + $as_echo_n "(cached) " >&6 else if test -n "$OBJDUMP"; then ac_cv_prog_OBJDUMP="$OBJDUMP" # Let the user override the test. @@ -7264,25 +8787,25 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_OBJDUMP="${ac_tool_prefix}objdump" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS fi fi OBJDUMP=$ac_cv_prog_OBJDUMP if test -n "$OBJDUMP"; then - { echo "$as_me:$LINENO: result: $OBJDUMP" >&5 -echo "${ECHO_T}$OBJDUMP" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OBJDUMP" >&5 +$as_echo "$OBJDUMP" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi @@ -7291,10 +8814,10 @@ ac_ct_OBJDUMP=$OBJDUMP # Extract the first word of "objdump", so it can be a program name with args. set dummy objdump; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_ac_ct_OBJDUMP+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_OBJDUMP+:} false; then : + $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_OBJDUMP"; then ac_cv_prog_ac_ct_OBJDUMP="$ac_ct_OBJDUMP" # Let the user override the test. @@ -7304,25 +8827,25 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_OBJDUMP="objdump" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS fi fi ac_ct_OBJDUMP=$ac_cv_prog_ac_ct_OBJDUMP if test -n "$ac_ct_OBJDUMP"; then - { echo "$as_me:$LINENO: result: $ac_ct_OBJDUMP" >&5 -echo "${ECHO_T}$ac_ct_OBJDUMP" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OBJDUMP" >&5 +$as_echo "$ac_ct_OBJDUMP" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi if test "x$ac_ct_OBJDUMP" = x; then @@ -7330,12 +8853,8 @@ else case $cross_compiling:$ac_tool_warned in yes:) -{ echo "$as_me:$LINENO: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." >&5 -echo "$as_me: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." >&2;} +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac OBJDUMP=$ac_ct_OBJDUMP @@ -7375,7 +8894,7 @@ # Check whether --with-pic was given. -if test "${with_pic+set}" = set; then +if test "${with_pic+set}" = set; then : withval=$with_pic; pic_mode="$withval" else pic_mode=default @@ -7391,7 +8910,7 @@ # Check whether --enable-fast-install was given. 
-if test "${enable_fast_install+set}" = set; then +if test "${enable_fast_install+set}" = set; then : enableval=$enable_fast_install; p=${PACKAGE-default} case $enableval in yes) enable_fast_install=yes ;; @@ -7473,10 +8992,10 @@ setopt NO_GLOB_SUBST fi -{ echo "$as_me:$LINENO: checking for objdir" >&5 -echo $ECHO_N "checking for objdir... $ECHO_C" >&6; } -if test "${lt_cv_objdir+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for objdir" >&5 +$as_echo_n "checking for objdir... " >&6; } +if ${lt_cv_objdir+:} false; then : + $as_echo_n "(cached) " >&6 else rm -f .libs 2>/dev/null mkdir .libs 2>/dev/null @@ -7488,8 +9007,8 @@ fi rmdir .libs 2>/dev/null fi -{ echo "$as_me:$LINENO: result: $lt_cv_objdir" >&5 -echo "${ECHO_T}$lt_cv_objdir" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_objdir" >&5 +$as_echo "$lt_cv_objdir" >&6; } objdir=$lt_cv_objdir @@ -7551,10 +9070,10 @@ case $deplibs_check_method in file_magic*) if test "$file_magic_cmd" = '$MAGIC_CMD'; then - { echo "$as_me:$LINENO: checking for ${ac_tool_prefix}file" >&5 -echo $ECHO_N "checking for ${ac_tool_prefix}file... $ECHO_C" >&6; } -if test "${lt_cv_path_MAGIC_CMD+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ${ac_tool_prefix}file" >&5 +$as_echo_n "checking for ${ac_tool_prefix}file... 
" >&6; } +if ${lt_cv_path_MAGIC_CMD+:} false; then : + $as_echo_n "(cached) " >&6 else case $MAGIC_CMD in [\\/*] | ?:[\\/]*) @@ -7604,11 +9123,11 @@ MAGIC_CMD="$lt_cv_path_MAGIC_CMD" if test -n "$MAGIC_CMD"; then - { echo "$as_me:$LINENO: result: $MAGIC_CMD" >&5 -echo "${ECHO_T}$MAGIC_CMD" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5 +$as_echo "$MAGIC_CMD" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi @@ -7617,10 +9136,10 @@ if test -z "$lt_cv_path_MAGIC_CMD"; then if test -n "$ac_tool_prefix"; then - { echo "$as_me:$LINENO: checking for file" >&5 -echo $ECHO_N "checking for file... $ECHO_C" >&6; } -if test "${lt_cv_path_MAGIC_CMD+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for file" >&5 +$as_echo_n "checking for file... " >&6; } +if ${lt_cv_path_MAGIC_CMD+:} false; then : + $as_echo_n "(cached) " >&6 else case $MAGIC_CMD in [\\/*] | ?:[\\/]*) @@ -7670,11 +9189,11 @@ MAGIC_CMD="$lt_cv_path_MAGIC_CMD" if test -n "$MAGIC_CMD"; then - { echo "$as_me:$LINENO: result: $MAGIC_CMD" >&5 -echo "${ECHO_T}$MAGIC_CMD" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5 +$as_echo "$MAGIC_CMD" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi @@ -7734,7 +9253,7 @@ echo "$lt_simple_compile_test_code" >conftest.$ac_ext eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err _lt_compiler_boilerplate=`cat conftest.err` -$RM -r conftest* +$RM conftest* ac_outfile=conftest.$ac_objext echo "$lt_simple_link_test_code" >conftest.$ac_ext @@ -7759,10 +9278,10 @@ lt_prog_compiler_no_builtin_flag=' -fno-builtin' ;; esac - { echo "$as_me:$LINENO: checking if $compiler supports -fno-rtti -fno-exceptions" >&5 -echo $ECHO_N "checking if 
$compiler supports -fno-rtti -fno-exceptions... $ECHO_C" >&6; } -if test "${lt_cv_prog_compiler_rtti_exceptions+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -fno-rtti -fno-exceptions" >&5 +$as_echo_n "checking if $compiler supports -fno-rtti -fno-exceptions... " >&6; } +if ${lt_cv_prog_compiler_rtti_exceptions+:} false; then : + $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_rtti_exceptions=no ac_outfile=conftest.$ac_objext @@ -7791,11 +9310,11 @@ lt_cv_prog_compiler_rtti_exceptions=yes fi fi - $RM -r conftest* + $RM conftest* fi -{ echo "$as_me:$LINENO: result: $lt_cv_prog_compiler_rtti_exceptions" >&5 -echo "${ECHO_T}$lt_cv_prog_compiler_rtti_exceptions" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_rtti_exceptions" >&5 +$as_echo "$lt_cv_prog_compiler_rtti_exceptions" >&6; } if test x"$lt_cv_prog_compiler_rtti_exceptions" = xyes; then lt_prog_compiler_no_builtin_flag="$lt_prog_compiler_no_builtin_flag -fno-rtti -fno-exceptions" @@ -7814,8 +9333,6 @@ lt_prog_compiler_pic= lt_prog_compiler_static= -{ echo "$as_me:$LINENO: checking for $compiler option to produce PIC" >&5 -echo $ECHO_N "checking for $compiler option to produce PIC... 
$ECHO_C" >&6; } if test "$GCC" = yes; then lt_prog_compiler_wl='-Wl,' @@ -7980,6 +9497,12 @@ lt_prog_compiler_pic='--shared' lt_prog_compiler_static='--static' ;; + nagfor*) + # NAG Fortran compiler + lt_prog_compiler_wl='-Wl,-Wl,,' + lt_prog_compiler_pic='-PIC' + lt_prog_compiler_static='-Bstatic' + ;; pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) # Portland Group compilers (*not* the Pentium gcc compiler, # which looks to be a dead project) @@ -8099,22 +9622,26 @@ lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC" ;; esac -{ echo "$as_me:$LINENO: result: $lt_prog_compiler_pic" >&5 -echo "${ECHO_T}$lt_prog_compiler_pic" >&6; } - - - - +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 +$as_echo_n "checking for $compiler option to produce PIC... " >&6; } +if ${lt_cv_prog_compiler_pic+:} false; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_prog_compiler_pic=$lt_prog_compiler_pic +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5 +$as_echo "$lt_cv_prog_compiler_pic" >&6; } +lt_prog_compiler_pic=$lt_cv_prog_compiler_pic # # Check to make sure the PIC flag actually works. # if test -n "$lt_prog_compiler_pic"; then - { echo "$as_me:$LINENO: checking if $compiler PIC flag $lt_prog_compiler_pic works" >&5 -echo $ECHO_N "checking if $compiler PIC flag $lt_prog_compiler_pic works... $ECHO_C" >&6; } -if test "${lt_cv_prog_compiler_pic_works+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler PIC flag $lt_prog_compiler_pic works" >&5 +$as_echo_n "checking if $compiler PIC flag $lt_prog_compiler_pic works... 
" >&6; } +if ${lt_cv_prog_compiler_pic_works+:} false; then : + $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_pic_works=no ac_outfile=conftest.$ac_objext @@ -8143,11 +9670,11 @@ lt_cv_prog_compiler_pic_works=yes fi fi - $RM -r conftest* + $RM conftest* fi -{ echo "$as_me:$LINENO: result: $lt_cv_prog_compiler_pic_works" >&5 -echo "${ECHO_T}$lt_cv_prog_compiler_pic_works" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works" >&5 +$as_echo "$lt_cv_prog_compiler_pic_works" >&6; } if test x"$lt_cv_prog_compiler_pic_works" = xyes; then case $lt_prog_compiler_pic in @@ -8166,14 +9693,19 @@ + + + + + # # Check to make sure the static flag actually works. # wl=$lt_prog_compiler_wl eval lt_tmp_static_flag=\"$lt_prog_compiler_static\" -{ echo "$as_me:$LINENO: checking if $compiler static flag $lt_tmp_static_flag works" >&5 -echo $ECHO_N "checking if $compiler static flag $lt_tmp_static_flag works... $ECHO_C" >&6; } -if test "${lt_cv_prog_compiler_static_works+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler static flag $lt_tmp_static_flag works" >&5 +$as_echo_n "checking if $compiler static flag $lt_tmp_static_flag works... " >&6; } +if ${lt_cv_prog_compiler_static_works+:} false; then : + $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_static_works=no save_LDFLAGS="$LDFLAGS" @@ -8198,8 +9730,8 @@ LDFLAGS="$save_LDFLAGS" fi -{ echo "$as_me:$LINENO: result: $lt_cv_prog_compiler_static_works" >&5 -echo "${ECHO_T}$lt_cv_prog_compiler_static_works" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works" >&5 +$as_echo "$lt_cv_prog_compiler_static_works" >&6; } if test x"$lt_cv_prog_compiler_static_works" = xyes; then : @@ -8213,10 +9745,10 @@ - { echo "$as_me:$LINENO: checking if $compiler supports -c -o file.$ac_objext" >&5 -echo $ECHO_N "checking if $compiler supports -c -o file.$ac_objext... 
$ECHO_C" >&6; } -if test "${lt_cv_prog_compiler_c_o+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 +$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } +if ${lt_cv_prog_compiler_c_o+:} false; then : + $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_c_o=no $RM -r conftest 2>/dev/null @@ -8250,28 +9782,28 @@ fi fi chmod u+w . 2>&5 - $RM -r conftest* + $RM conftest* # SGI C++ compiler will create directory out/ii_files/ for # template instantiation test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files $RM out/* && rmdir out cd .. $RM -r conftest - $RM -r conftest* + $RM conftest* fi -{ echo "$as_me:$LINENO: result: $lt_cv_prog_compiler_c_o" >&5 -echo "${ECHO_T}$lt_cv_prog_compiler_c_o" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5 +$as_echo "$lt_cv_prog_compiler_c_o" >&6; } - { echo "$as_me:$LINENO: checking if $compiler supports -c -o file.$ac_objext" >&5 -echo $ECHO_N "checking if $compiler supports -c -o file.$ac_objext... $ECHO_C" >&6; } -if test "${lt_cv_prog_compiler_c_o+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 +$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } +if ${lt_cv_prog_compiler_c_o+:} false; then : + $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_c_o=no $RM -r conftest 2>/dev/null @@ -8305,18 +9837,18 @@ fi fi chmod u+w . 2>&5 - $RM -r conftest* + $RM conftest* # SGI C++ compiler will create directory out/ii_files/ for # template instantiation test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files $RM out/* && rmdir out cd .. 
$RM -r conftest - $RM -r conftest* + $RM conftest* fi -{ echo "$as_me:$LINENO: result: $lt_cv_prog_compiler_c_o" >&5 -echo "${ECHO_T}$lt_cv_prog_compiler_c_o" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5 +$as_echo "$lt_cv_prog_compiler_c_o" >&6; } @@ -8324,19 +9856,19 @@ hard_links="nottested" if test "$lt_cv_prog_compiler_c_o" = no && test "$need_locks" != no; then # do not overwrite the value of need_locks provided by the user - { echo "$as_me:$LINENO: checking if we can lock with hard links" >&5 -echo $ECHO_N "checking if we can lock with hard links... $ECHO_C" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5 +$as_echo_n "checking if we can lock with hard links... " >&6; } hard_links=yes - $RM -r conftest* + $RM conftest* ln conftest.a conftest.b 2>/dev/null && hard_links=no touch conftest.a ln conftest.a conftest.b 2>&5 || hard_links=no ln conftest.a conftest.b 2>/dev/null && hard_links=no - { echo "$as_me:$LINENO: result: $hard_links" >&5 -echo "${ECHO_T}$hard_links" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5 +$as_echo "$hard_links" >&6; } if test "$hard_links" = no; then - { echo "$as_me:$LINENO: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5 -echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5 +$as_echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;} need_locks=warn fi else @@ -8348,8 +9880,8 @@ - { echo "$as_me:$LINENO: checking whether the $compiler linker ($LD) supports shared libraries" >&5 -echo $ECHO_N "checking whether the $compiler linker ($LD) supports shared libraries... 
$ECHO_C" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5 +$as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... " >&6; } runpath_var= allow_undefined_flag= @@ -8516,7 +10048,8 @@ allow_undefined_flag=unsupported always_export_symbols=no enable_shared_with_static_runtimes=yes - export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' + export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' + exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' @@ -8564,7 +10097,7 @@ if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \ && test "$tmp_diet" = no then - tmp_addflag= + tmp_addflag=' $pic_flag' tmp_sharedflag='-shared' case $cc_basename,$host_cpu in pgcc*) # Portland Group C compiler @@ -8634,8 +10167,8 @@ archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' wlarc= else - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file 
$wl$export_symbols -o $lib' fi ;; @@ -8653,8 +10186,8 @@ _LT_EOF elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' else ld_shlibs=no fi @@ -8700,8 +10233,8 @@ *) if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' else ld_shlibs=no fi @@ -8831,11 +10364,13 @@ allow_undefined_flag='-berok' # Determine the default libpath from the value encoded in an # empty executable. - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF + if test "${lt_cv_aix_libpath+set}" = set; then + aix_libpath=$lt_cv_aix_libpath +else + if ${lt_cv_aix_libpath_+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int @@ -8846,47 +10381,31 @@ return 0; } _ACEOF -rm -f conftest.$ac_objext conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_c_werror_flag" || - test ! -s conftest.err - } && test -s conftest$ac_exeext && - $as_test_x conftest$ac_exeext; then +if ac_fn_c_try_link "$LINENO"; then : -lt_aix_libpath_sed=' - /Import File Strings/,/^$/ { - /^0/ { - s/^0 *\(.*\)$/\1/ - p - } - }' -aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -# Check for a 64-bit object if we didn't find anything. -if test -z "$aix_libpath"; then - aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` + lt_aix_libpath_sed=' + /Import File Strings/,/^$/ { + /^0/ { + s/^0 *\([^ ]*\) *$/\1/ + p + } + }' + lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` + # Check for a 64-bit object if we didn't find anything. 
+ if test -z "$lt_cv_aix_libpath_"; then + lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` + fi fi -else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext + if test -z "$lt_cv_aix_libpath_"; then + lt_cv_aix_libpath_="/usr/lib:/lib" + fi fi -rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ - conftest$ac_exeext conftest.$ac_ext -if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + aix_libpath=$lt_cv_aix_libpath_ +fi hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" @@ -8898,11 +10417,13 @@ else # Determine the default libpath from the value encoded in an # empty executable. - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF + if test "${lt_cv_aix_libpath+set}" = set; then + aix_libpath=$lt_cv_aix_libpath +else + if ${lt_cv_aix_libpath_+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int @@ -8913,47 +10434,31 @@ return 0; } _ACEOF -rm -f conftest.$ac_objext conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_c_werror_flag" || - test ! 
-s conftest.err - } && test -s conftest$ac_exeext && - $as_test_x conftest$ac_exeext; then +if ac_fn_c_try_link "$LINENO"; then : -lt_aix_libpath_sed=' - /Import File Strings/,/^$/ { - /^0/ { - s/^0 *\(.*\)$/\1/ - p - } - }' -aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -# Check for a 64-bit object if we didn't find anything. -if test -z "$aix_libpath"; then - aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` + lt_aix_libpath_sed=' + /Import File Strings/,/^$/ { + /^0/ { + s/^0 *\([^ ]*\) *$/\1/ + p + } + }' + lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` + # Check for a 64-bit object if we didn't find anything. + if test -z "$lt_cv_aix_libpath_"; then + lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` + fi fi -else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext + if test -z "$lt_cv_aix_libpath_"; then + lt_cv_aix_libpath_="/usr/lib:/lib" + fi fi -rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ - conftest$ac_exeext conftest.$ac_ext -if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + aix_libpath=$lt_cv_aix_libpath_ +fi hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" # Warning - without using the other run time loading flags, @@ -8998,20 +10503,63 @@ # Microsoft Visual C++. # hardcode_libdir_flag_spec is actually meaningless, as there is # no search path for DLLs. - hardcode_libdir_flag_spec=' ' - allow_undefined_flag=unsupported - # Tell ltmain to make .lib files, not .a files. - libext=lib - # Tell ltmain to make .dll files, not .so files. - shrext_cmds=".dll" - # FIXME: Setting linknames here is a bad hack. 
- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' - # The linker will automatically build a .lib file if we build a DLL. - old_archive_from_new_cmds='true' - # FIXME: Should let the user specify the lib program. - old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' - fix_srcfile_path='`cygpath -w "$srcfile"`' - enable_shared_with_static_runtimes=yes + case $cc_basename in + cl*) + # Native MSVC + hardcode_libdir_flag_spec=' ' + allow_undefined_flag=unsupported + always_export_symbols=yes + file_list_spec='@' + # Tell ltmain to make .lib files, not .a files. + libext=lib + # Tell ltmain to make .dll files, not .so files. + shrext_cmds=".dll" + # FIXME: Setting linknames here is a bad hack. + archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' + archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then + sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; + else + sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; + fi~ + $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ + linknames=' + # The linker will not automatically build a static lib if we build a DLL. 
+ # _LT_TAGVAR(old_archive_from_new_cmds, )='true' + enable_shared_with_static_runtimes=yes + export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' + # Don't use ranlib + old_postinstall_cmds='chmod 644 $oldlib' + postlink_cmds='lt_outputfile="@OUTPUT@"~ + lt_tool_outputfile="@TOOL_OUTPUT@"~ + case $lt_outputfile in + *.exe|*.EXE) ;; + *) + lt_outputfile="$lt_outputfile.exe" + lt_tool_outputfile="$lt_tool_outputfile.exe" + ;; + esac~ + if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then + $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; + $RM "$lt_outputfile.manifest"; + fi' + ;; + *) + # Assume MSVC wrapper + hardcode_libdir_flag_spec=' ' + allow_undefined_flag=unsupported + # Tell ltmain to make .lib files, not .a files. + libext=lib + # Tell ltmain to make .dll files, not .so files. + shrext_cmds=".dll" + # FIXME: Setting linknames here is a bad hack. + archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' + # The linker will automatically build a .lib file if we build a DLL. + old_archive_from_new_cmds='true' + # FIXME: Should let the user specify the lib program. + old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' + enable_shared_with_static_runtimes=yes + ;; + esac ;; darwin* | rhapsody*) @@ -9076,7 +10624,7 @@ # FreeBSD 3 and greater uses gcc -shared to do shared libraries. 
freebsd* | dragonfly*) - archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags' + archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' hardcode_libdir_flag_spec='-R$libdir' hardcode_direct=yes hardcode_shlibpath_var=no @@ -9084,7 +10632,7 @@ hpux9*) if test "$GCC" = yes; then - archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' else archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' fi @@ -9100,7 +10648,7 @@ hpux10*) if test "$GCC" = yes && test "$with_gnu_ld" = no; then - archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' + archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' else archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' fi @@ -9124,10 +10672,10 @@ archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' ;; ia64*) - archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' + archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' ;; *) - archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' + archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o 
$lib $libobjs $deplibs $compiler_flags' ;; esac else @@ -9142,10 +10690,10 @@ # Older versions of the 11.00 compiler do not understand -b yet # (HP92453-01 A.11.01.20 doesn't, HP92453-01 B.11.X.35175-35176.GP does) - { echo "$as_me:$LINENO: checking if $CC understands -b" >&5 -echo $ECHO_N "checking if $CC understands -b... $ECHO_C" >&6; } -if test "${lt_cv_prog_compiler__b+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $CC understands -b" >&5 +$as_echo_n "checking if $CC understands -b... " >&6; } +if ${lt_cv_prog_compiler__b+:} false; then : + $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler__b=no save_LDFLAGS="$LDFLAGS" @@ -9170,8 +10718,8 @@ LDFLAGS="$save_LDFLAGS" fi -{ echo "$as_me:$LINENO: result: $lt_cv_prog_compiler__b" >&5 -echo "${ECHO_T}$lt_cv_prog_compiler__b" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler__b" >&5 +$as_echo "$lt_cv_prog_compiler__b" >&6; } if test x"$lt_cv_prog_compiler__b" = xyes; then archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' @@ -9206,45 +10754,36 @@ irix5* | irix6* | nonstopux*) if test "$GCC" = yes; then - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' # Try to use the -exported_symbol ld option, if it does not # work, assume that -exports_file does not work either and # implicitly export all symbols. 
- save_LDFLAGS="$LDFLAGS" - LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" - cat >conftest.$ac_ext <<_ACEOF -int foo(void) {} + # This should be the same for all languages, so no per-tag cache variable. + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5 +$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; } +if ${lt_cv_irix_exported_symbol+:} false; then : + $as_echo_n "(cached) " >&6 +else + save_LDFLAGS="$LDFLAGS" + LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +int foo (void) { return 0; } _ACEOF -rm -f conftest.$ac_objext conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_c_werror_flag" || - test ! 
-s conftest.err - } && test -s conftest$ac_exeext && - $as_test_x conftest$ac_exeext; then - archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' - +if ac_fn_c_try_link "$LINENO"; then : + lt_cv_irix_exported_symbol=yes else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - + lt_cv_irix_exported_symbol=no fi - -rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ - conftest$ac_exeext conftest.$ac_ext - LDFLAGS="$save_LDFLAGS" +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext + LDFLAGS="$save_LDFLAGS" +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5 +$as_echo "$lt_cv_irix_exported_symbol" >&6; } + if test "$lt_cv_irix_exported_symbol" = yes; then + archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' + fi else archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' @@ -9329,7 +10868,7 @@ osf4* | osf5*) # as osf3* with the addition of -msym flag if test "$GCC" = yes; then allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' - archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym 
${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' else allow_undefined_flag=' -expect_unresolved \*' @@ -9348,9 +10887,9 @@ no_undefined_flag=' -z defs' if test "$GCC" = yes; then wlarc='${wl}' - archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' + archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ - $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' + $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' else case `$CC -V 2>&1` in *"Compilers 5.0"*) @@ -9499,8 +11038,8 @@ fi fi -{ echo "$as_me:$LINENO: result: $ld_shlibs" >&5 -echo "${ECHO_T}$ld_shlibs" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs" >&5 +$as_echo "$ld_shlibs" >&6; } test "$ld_shlibs" = no && can_build_shared=no with_gnu_ld=$with_gnu_ld @@ -9536,19 +11075,19 @@ # Test whether the compiler implicitly links with -lc since on some # systems, -lgcc has to come before -lc. If gcc already passes -lc # to ld, don't add -lc before -lgcc. - { echo "$as_me:$LINENO: checking whether -lc should be explicitly linked in" >&5 -echo $ECHO_N "checking whether -lc should be explicitly linked in... 
$ECHO_C" >&6; } -if test "${lt_cv_archive_cmds_need_lc+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether -lc should be explicitly linked in" >&5 +$as_echo_n "checking whether -lc should be explicitly linked in... " >&6; } +if ${lt_cv_archive_cmds_need_lc+:} false; then : + $as_echo_n "(cached) " >&6 else - $RM -r conftest* + $RM conftest* echo "$lt_simple_compile_test_code" > conftest.$ac_ext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } 2>conftest.err; then + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } 2>conftest.err; then soname=conftest lib=conftest libobjs=conftest.$ac_objext @@ -9562,11 +11101,11 @@ libname=conftest lt_save_allow_undefined_flag=$allow_undefined_flag allow_undefined_flag= - if { (eval echo "$as_me:$LINENO: \"$archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1\"") >&5 + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1\""; } >&5 (eval $archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) 2>&5 ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } + $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 + test $ac_status = 0; } then lt_cv_archive_cmds_need_lc=no else @@ -9576,11 +11115,11 @@ else cat conftest.err 1>&5 fi - $RM -r conftest* + $RM conftest* fi -{ echo "$as_me:$LINENO: result: $lt_cv_archive_cmds_need_lc" >&5 -echo "${ECHO_T}$lt_cv_archive_cmds_need_lc" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_archive_cmds_need_lc" >&5 +$as_echo "$lt_cv_archive_cmds_need_lc" >&6; } archive_cmds_need_lc=$lt_cv_archive_cmds_need_lc ;; esac @@ -9744,8 +11283,8 @@ - { echo "$as_me:$LINENO: checking dynamic linker characteristics" >&5 -echo $ECHO_N "checking dynamic linker characteristics... $ECHO_C" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: checking dynamic linker characteristics" >&5 +$as_echo_n "checking dynamic linker characteristics... " >&6; } if test "$GCC" = yes; then case $host_os in @@ -9926,8 +11465,9 @@ need_version=no need_lib_prefix=no - case $GCC,$host_os in - yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*) + case $GCC,$cc_basename in + yes,*) + # gcc library_names_spec='$libname.dll.a' # DLL is installed to $(libdir)/../bin by postinstall_cmds postinstall_cmds='base_file=`basename \${file}`~ @@ -9960,13 +11500,71 @@ library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' ;; esac + dynamic_linker='Win32 ld.exe' + ;; + + *,cl*) + # Native MSVC + libname_spec='$name' + soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' + library_names_spec='${libname}.dll.lib' + + case $build_os in + mingw*) + sys_lib_search_path_spec= + lt_save_ifs=$IFS + IFS=';' + for lt_path in $LIB + do + IFS=$lt_save_ifs + # Let DOS variable expansion print the short 8.3 style file name. + lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` + sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" + done + IFS=$lt_save_ifs + # Convert to MSYS style. 
+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'` + ;; + cygwin*) + # Convert to unix form, then to dos form, then back to unix form + # but this time dos style (no spaces!) so that the unix form looks + # like /cygdrive/c/PROGRA~1:/cygdr... + sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` + sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` + sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` + ;; + *) + sys_lib_search_path_spec="$LIB" + if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then + # It is most probably a Windows format PATH. + sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` + else + sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` + fi + # FIXME: find the short name or the path components, as spaces are + # common. (e.g. "Program Files" -> "PROGRA~1") + ;; + esac + + # DLL is installed to $(libdir)/../bin by postinstall_cmds + postinstall_cmds='base_file=`basename \${file}`~ + dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ + dldir=$destdir/`dirname \$dlpath`~ + test -d \$dldir || mkdir -p \$dldir~ + $install_prog $dir/$dlname \$dldir/$dlname' + postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ + dlpath=$dir/\$dldll~ + $RM \$dlpath' + shlibpath_overrides_runpath=yes + dynamic_linker='Win32 link.exe' ;; *) + # Assume MSVC wrapper library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' + dynamic_linker='Win32 ld.exe' ;; esac - dynamic_linker='Win32 ld.exe' # FIXME: first we should search . 
and the directory the executable is in shlibpath_var=PATH ;; @@ -9976,7 +11574,7 @@ version_type=darwin need_lib_prefix=no need_version=no - library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext ${libname}${release}${versuffix}$shared_ext' + library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext' soname_spec='${libname}${release}${major}$shared_ext' shlibpath_overrides_runpath=yes shlibpath_var=DYLD_LIBRARY_PATH @@ -10010,7 +11608,14 @@ *) objformat=elf ;; esac fi - version_type=freebsd-$objformat + # Handle Gentoo/FreeBSD as it was Linux + case $host_vendor in + gentoo) + version_type=linux ;; + *) + version_type=freebsd-$objformat ;; + esac + case $version_type in freebsd-elf*) library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' @@ -10021,6 +11626,12 @@ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix' need_version=yes ;; + linux) + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + need_lib_prefix=no + need_version=no + ;; esac shlibpath_var=LD_LIBRARY_PATH case $host_os in @@ -10180,19 +11791,15 @@ shlibpath_overrides_runpath=no # Some binutils ld are patched to set DT_RUNPATH - if test "${lt_cv_shlibpath_overrides_runpath+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + if ${lt_cv_shlibpath_overrides_runpath+:} false; then : + $as_echo_n "(cached) " >&6 else lt_cv_shlibpath_overrides_runpath=no save_LDFLAGS=$LDFLAGS save_libdir=$libdir eval "libdir=/foo; wl=\"$lt_prog_compiler_wl\"; \ LDFLAGS=\"\$LDFLAGS $hardcode_libdir_flag_spec\"" - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF + cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int @@ -10203,37 +11810,13 @@ return 0; } _ACEOF -rm -f conftest.$ac_objext conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_c_werror_flag" || - test ! -s conftest.err - } && test -s conftest$ac_exeext && - $as_test_x conftest$ac_exeext; then - if ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null; then +if ac_fn_c_try_link "$LINENO"; then : + if ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null; then : lt_cv_shlibpath_overrides_runpath=yes fi - -else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - fi - -rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ - conftest$ac_exeext conftest.$ac_ext +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext LDFLAGS=$save_LDFLAGS libdir=$save_libdir @@ -10449,8 +12032,8 @@ dynamic_linker=no ;; esac -{ echo "$as_me:$LINENO: result: $dynamic_linker" >&5 -echo "${ECHO_T}$dynamic_linker" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5 +$as_echo "$dynamic_linker" >&6; } test "$dynamic_linker" = no && can_build_shared=no variables_saved_for_relink="PATH $shlibpath_var $runpath_var" @@ -10556,8 +12139,8 @@ - { echo "$as_me:$LINENO: checking how to hardcode library paths into programs" >&5 -echo $ECHO_N "checking how to hardcode library paths into programs... $ECHO_C" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to hardcode library paths into programs" >&5 +$as_echo_n "checking how to hardcode library paths into programs... 
" >&6; } hardcode_action= if test -n "$hardcode_libdir_flag_spec" || test -n "$runpath_var" || @@ -10581,8 +12164,8 @@ # directories. hardcode_action=unsupported fi -{ echo "$as_me:$LINENO: result: $hardcode_action" >&5 -echo "${ECHO_T}$hardcode_action" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action" >&5 +$as_echo "$hardcode_action" >&6; } if test "$hardcode_action" = relink || test "$inherit_rpath" = yes; then @@ -10626,18 +12209,14 @@ darwin*) # if libdl is installed we need to link against it - { echo "$as_me:$LINENO: checking for dlopen in -ldl" >&5 -echo $ECHO_N "checking for dlopen in -ldl... $ECHO_C" >&6; } -if test "${ac_cv_lib_dl_dlopen+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5 +$as_echo_n "checking for dlopen in -ldl... " >&6; } +if ${ac_cv_lib_dl_dlopen+:} false; then : + $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldl $LIBS" -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF +cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. @@ -10655,39 +12234,18 @@ return 0; } _ACEOF -rm -f conftest.$ac_objext conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_c_werror_flag" || - test ! 
-s conftest.err - } && test -s conftest$ac_exeext && - $as_test_x conftest$ac_exeext; then +if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_dl_dlopen=yes else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_cv_lib_dl_dlopen=no + ac_cv_lib_dl_dlopen=no fi - -rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ - conftest$ac_exeext conftest.$ac_ext +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi -{ echo "$as_me:$LINENO: result: $ac_cv_lib_dl_dlopen" >&5 -echo "${ECHO_T}$ac_cv_lib_dl_dlopen" >&6; } -if test $ac_cv_lib_dl_dlopen = yes; then +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5 +$as_echo "$ac_cv_lib_dl_dlopen" >&6; } +if test "x$ac_cv_lib_dl_dlopen" = xyes; then : lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl" else @@ -10700,102 +12258,18 @@ ;; *) - { echo "$as_me:$LINENO: checking for shl_load" >&5 -echo $ECHO_N "checking for shl_load... $ECHO_C" >&6; } -if test "${ac_cv_func_shl_load+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 -else - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF -/* end confdefs.h. */ -/* Define shl_load to an innocuous variant, in case declares shl_load. - For example, HP-UX 11i declares gettimeofday. */ -#define shl_load innocuous_shl_load - -/* System header to define __stub macros and hopefully few prototypes, - which can conflict with char shl_load (); below. - Prefer to if __STDC__ is defined, since - exists even on freestanding compilers. */ - -#ifdef __STDC__ -# include -#else -# include -#endif - -#undef shl_load - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. 
*/ -#ifdef __cplusplus -extern "C" -#endif -char shl_load (); -/* The GNU C library defines this for functions which it implements - to always fail with ENOSYS. Some functions are actually named - something starting with __ and the normal name is an alias. */ -#if defined __stub_shl_load || defined __stub___shl_load -choke me -#endif - -int -main () -{ -return shl_load (); - ; - return 0; -} -_ACEOF -rm -f conftest.$ac_objext conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_c_werror_flag" || - test ! -s conftest.err - } && test -s conftest$ac_exeext && - $as_test_x conftest$ac_exeext; then - ac_cv_func_shl_load=yes -else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_cv_func_shl_load=no -fi - -rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ - conftest$ac_exeext conftest.$ac_ext -fi -{ echo "$as_me:$LINENO: result: $ac_cv_func_shl_load" >&5 -echo "${ECHO_T}$ac_cv_func_shl_load" >&6; } -if test $ac_cv_func_shl_load = yes; then + ac_fn_c_check_func "$LINENO" "shl_load" "ac_cv_func_shl_load" +if test "x$ac_cv_func_shl_load" = xyes; then : lt_cv_dlopen="shl_load" else - { echo "$as_me:$LINENO: checking for shl_load in -ldld" >&5 -echo $ECHO_N "checking for shl_load in -ldld... $ECHO_C" >&6; } -if test "${ac_cv_lib_dld_shl_load+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for shl_load in -ldld" >&5 +$as_echo_n "checking for shl_load in -ldld... 
" >&6; } +if ${ac_cv_lib_dld_shl_load+:} false; then : + $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldld $LIBS" -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF +cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. @@ -10813,137 +12287,32 @@ return 0; } _ACEOF -rm -f conftest.$ac_objext conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_c_werror_flag" || - test ! -s conftest.err - } && test -s conftest$ac_exeext && - $as_test_x conftest$ac_exeext; then +if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_dld_shl_load=yes else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_cv_lib_dld_shl_load=no + ac_cv_lib_dld_shl_load=no fi - -rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ - conftest$ac_exeext conftest.$ac_ext +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi -{ echo "$as_me:$LINENO: result: $ac_cv_lib_dld_shl_load" >&5 -echo "${ECHO_T}$ac_cv_lib_dld_shl_load" >&6; } -if test $ac_cv_lib_dld_shl_load = yes; then +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_shl_load" >&5 +$as_echo "$ac_cv_lib_dld_shl_load" >&6; } +if test "x$ac_cv_lib_dld_shl_load" = xyes; then : lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-ldld" else - { echo "$as_me:$LINENO: checking for dlopen" >&5 -echo $ECHO_N "checking for dlopen... 
$ECHO_C" >&6; } -if test "${ac_cv_func_dlopen+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 -else - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF -/* end confdefs.h. */ -/* Define dlopen to an innocuous variant, in case declares dlopen. - For example, HP-UX 11i declares gettimeofday. */ -#define dlopen innocuous_dlopen - -/* System header to define __stub macros and hopefully few prototypes, - which can conflict with char dlopen (); below. - Prefer to if __STDC__ is defined, since - exists even on freestanding compilers. */ - -#ifdef __STDC__ -# include -#else -# include -#endif - -#undef dlopen - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. */ -#ifdef __cplusplus -extern "C" -#endif -char dlopen (); -/* The GNU C library defines this for functions which it implements - to always fail with ENOSYS. Some functions are actually named - something starting with __ and the normal name is an alias. */ -#if defined __stub_dlopen || defined __stub___dlopen -choke me -#endif - -int -main () -{ -return dlopen (); - ; - return 0; -} -_ACEOF -rm -f conftest.$ac_objext conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_c_werror_flag" || - test ! 
-s conftest.err - } && test -s conftest$ac_exeext && - $as_test_x conftest$ac_exeext; then - ac_cv_func_dlopen=yes -else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_cv_func_dlopen=no -fi - -rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ - conftest$ac_exeext conftest.$ac_ext -fi -{ echo "$as_me:$LINENO: result: $ac_cv_func_dlopen" >&5 -echo "${ECHO_T}$ac_cv_func_dlopen" >&6; } -if test $ac_cv_func_dlopen = yes; then + ac_fn_c_check_func "$LINENO" "dlopen" "ac_cv_func_dlopen" +if test "x$ac_cv_func_dlopen" = xyes; then : lt_cv_dlopen="dlopen" else - { echo "$as_me:$LINENO: checking for dlopen in -ldl" >&5 -echo $ECHO_N "checking for dlopen in -ldl... $ECHO_C" >&6; } -if test "${ac_cv_lib_dl_dlopen+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5 +$as_echo_n "checking for dlopen in -ldl... " >&6; } +if ${ac_cv_lib_dl_dlopen+:} false; then : + $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldl $LIBS" -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF +cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. @@ -10961,53 +12330,28 @@ return 0; } _ACEOF -rm -f conftest.$ac_objext conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_c_werror_flag" || - test ! 
-s conftest.err - } && test -s conftest$ac_exeext && - $as_test_x conftest$ac_exeext; then +if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_dl_dlopen=yes else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_cv_lib_dl_dlopen=no + ac_cv_lib_dl_dlopen=no fi - -rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ - conftest$ac_exeext conftest.$ac_ext +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi -{ echo "$as_me:$LINENO: result: $ac_cv_lib_dl_dlopen" >&5 -echo "${ECHO_T}$ac_cv_lib_dl_dlopen" >&6; } -if test $ac_cv_lib_dl_dlopen = yes; then +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5 +$as_echo "$ac_cv_lib_dl_dlopen" >&6; } +if test "x$ac_cv_lib_dl_dlopen" = xyes; then : lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl" else - { echo "$as_me:$LINENO: checking for dlopen in -lsvld" >&5 -echo $ECHO_N "checking for dlopen in -lsvld... $ECHO_C" >&6; } -if test "${ac_cv_lib_svld_dlopen+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -lsvld" >&5 +$as_echo_n "checking for dlopen in -lsvld... " >&6; } +if ${ac_cv_lib_svld_dlopen+:} false; then : + $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lsvld $LIBS" -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF +cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. @@ -11025,53 +12369,28 @@ return 0; } _ACEOF -rm -f conftest.$ac_objext conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>conftest.er1 - ac_status=$? 
- grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_c_werror_flag" || - test ! -s conftest.err - } && test -s conftest$ac_exeext && - $as_test_x conftest$ac_exeext; then +if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_svld_dlopen=yes else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_cv_lib_svld_dlopen=no + ac_cv_lib_svld_dlopen=no fi - -rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ - conftest$ac_exeext conftest.$ac_ext +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi -{ echo "$as_me:$LINENO: result: $ac_cv_lib_svld_dlopen" >&5 -echo "${ECHO_T}$ac_cv_lib_svld_dlopen" >&6; } -if test $ac_cv_lib_svld_dlopen = yes; then +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_svld_dlopen" >&5 +$as_echo "$ac_cv_lib_svld_dlopen" >&6; } +if test "x$ac_cv_lib_svld_dlopen" = xyes; then : lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld" else - { echo "$as_me:$LINENO: checking for dld_link in -ldld" >&5 -echo $ECHO_N "checking for dld_link in -ldld... $ECHO_C" >&6; } -if test "${ac_cv_lib_dld_dld_link+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dld_link in -ldld" >&5 +$as_echo_n "checking for dld_link in -ldld... " >&6; } +if ${ac_cv_lib_dld_dld_link+:} false; then : + $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldld $LIBS" -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF +cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. 
@@ -11089,39 +12408,18 @@ return 0; } _ACEOF -rm -f conftest.$ac_objext conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_c_werror_flag" || - test ! -s conftest.err - } && test -s conftest$ac_exeext && - $as_test_x conftest$ac_exeext; then +if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_dld_dld_link=yes else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_cv_lib_dld_dld_link=no + ac_cv_lib_dld_dld_link=no fi - -rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ - conftest$ac_exeext conftest.$ac_ext +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi -{ echo "$as_me:$LINENO: result: $ac_cv_lib_dld_dld_link" >&5 -echo "${ECHO_T}$ac_cv_lib_dld_dld_link" >&6; } -if test $ac_cv_lib_dld_dld_link = yes; then +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_dld_link" >&5 +$as_echo "$ac_cv_lib_dld_dld_link" >&6; } +if test "x$ac_cv_lib_dld_dld_link" = xyes; then : lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-ldld" fi @@ -11160,10 +12458,10 @@ save_LIBS="$LIBS" LIBS="$lt_cv_dlopen_libs $LIBS" - { echo "$as_me:$LINENO: checking whether a program can dlopen itself" >&5 -echo $ECHO_N "checking whether a program can dlopen itself... $ECHO_C" >&6; } -if test "${lt_cv_dlopen_self+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a program can dlopen itself" >&5 +$as_echo_n "checking whether a program can dlopen itself... 
" >&6; } +if ${lt_cv_dlopen_self+:} false; then : + $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : lt_cv_dlopen_self=cross @@ -11215,10 +12513,10 @@ /* When -fvisbility=hidden is used, assume the code has been annotated correspondingly for the symbols needed. */ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) -void fnord () __attribute__((visibility("default"))); +int fnord () __attribute__((visibility("default"))); #endif -void fnord () { int i=42; } +int fnord () { return 42; } int main () { void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); @@ -11240,11 +12538,11 @@ return status; } _LT_EOF - if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 (eval $ac_link) 2>&5 ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && test -s conftest${ac_exeext} 2>/dev/null; then + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then (./conftest; exit; ) >&5 2>/dev/null lt_status=$? case x$lt_status in @@ -11261,15 +12559,15 @@ fi -{ echo "$as_me:$LINENO: result: $lt_cv_dlopen_self" >&5 -echo "${ECHO_T}$lt_cv_dlopen_self" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self" >&5 +$as_echo "$lt_cv_dlopen_self" >&6; } if test "x$lt_cv_dlopen_self" = xyes; then wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\" - { echo "$as_me:$LINENO: checking whether a statically linked program can dlopen itself" >&5 -echo $ECHO_N "checking whether a statically linked program can dlopen itself... $ECHO_C" >&6; } -if test "${lt_cv_dlopen_self_static+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a statically linked program can dlopen itself" >&5 +$as_echo_n "checking whether a statically linked program can dlopen itself... 
" >&6; } +if ${lt_cv_dlopen_self_static+:} false; then : + $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : lt_cv_dlopen_self_static=cross @@ -11321,10 +12619,10 @@ /* When -fvisbility=hidden is used, assume the code has been annotated correspondingly for the symbols needed. */ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) -void fnord () __attribute__((visibility("default"))); +int fnord () __attribute__((visibility("default"))); #endif -void fnord () { int i=42; } +int fnord () { return 42; } int main () { void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); @@ -11346,11 +12644,11 @@ return status; } _LT_EOF - if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 (eval $ac_link) 2>&5 ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && test -s conftest${ac_exeext} 2>/dev/null; then + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then (./conftest; exit; ) >&5 2>/dev/null lt_status=$? case x$lt_status in @@ -11367,8 +12665,8 @@ fi -{ echo "$as_me:$LINENO: result: $lt_cv_dlopen_self_static" >&5 -echo "${ECHO_T}$lt_cv_dlopen_self_static" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self_static" >&5 +$as_echo "$lt_cv_dlopen_self_static" >&6; } fi CPPFLAGS="$save_CPPFLAGS" @@ -11406,13 +12704,13 @@ striplib= old_striplib= -{ echo "$as_me:$LINENO: checking whether stripping libraries is possible" >&5 -echo $ECHO_N "checking whether stripping libraries is possible... $ECHO_C" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether stripping libraries is possible" >&5 +$as_echo_n "checking whether stripping libraries is possible... 
" >&6; } if test -n "$STRIP" && $STRIP -V 2>&1 | $GREP "GNU strip" >/dev/null; then test -z "$old_striplib" && old_striplib="$STRIP --strip-debug" test -z "$striplib" && striplib="$STRIP --strip-unneeded" - { echo "$as_me:$LINENO: result: yes" >&5 -echo "${ECHO_T}yes" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } else # FIXME - insert some real tests, host_os isn't really good enough case $host_os in @@ -11420,16 +12718,16 @@ if test -n "$STRIP" ; then striplib="$STRIP -x" old_striplib="$STRIP -S" - { echo "$as_me:$LINENO: result: yes" >&5 -echo "${ECHO_T}yes" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi ;; *) - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } ;; esac fi @@ -11446,13 +12744,13 @@ # Report which library types will actually be built - { echo "$as_me:$LINENO: checking if libtool supports shared libraries" >&5 -echo $ECHO_N "checking if libtool supports shared libraries... $ECHO_C" >&6; } - { echo "$as_me:$LINENO: result: $can_build_shared" >&5 -echo "${ECHO_T}$can_build_shared" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: checking if libtool supports shared libraries" >&5 +$as_echo_n "checking if libtool supports shared libraries... " >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $can_build_shared" >&5 +$as_echo "$can_build_shared" >&6; } - { echo "$as_me:$LINENO: checking whether to build shared libraries" >&5 -echo $ECHO_N "checking whether to build shared libraries... $ECHO_C" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build shared libraries" >&5 +$as_echo_n "checking whether to build shared libraries... 
" >&6; } test "$can_build_shared" = "no" && enable_shared=no # On AIX, shared libraries and static libraries use the same namespace, and @@ -11472,15 +12770,15 @@ fi ;; esac - { echo "$as_me:$LINENO: result: $enable_shared" >&5 -echo "${ECHO_T}$enable_shared" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_shared" >&5 +$as_echo "$enable_shared" >&6; } - { echo "$as_me:$LINENO: checking whether to build static libraries" >&5 -echo $ECHO_N "checking whether to build static libraries... $ECHO_C" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build static libraries" >&5 +$as_echo_n "checking whether to build static libraries... " >&6; } # Make sure either enable_shared or enable_static is yes. test "$enable_shared" = yes || enable_static=yes - { echo "$as_me:$LINENO: result: $enable_static" >&5 -echo "${ECHO_T}$enable_static" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_static" >&5 +$as_echo "$enable_static" >&6; } @@ -11515,15 +12813,15 @@ -{ echo "$as_me:$LINENO: checking whether ln -s works" >&5 -echo $ECHO_N "checking whether ln -s works... $ECHO_C" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ln -s works" >&5 +$as_echo_n "checking whether ln -s works... " >&6; } LN_S=$as_ln_s if test "$LN_S" = "ln -s"; then - { echo "$as_me:$LINENO: result: yes" >&5 -echo "${ECHO_T}yes" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } else - { echo "$as_me:$LINENO: result: no, using $LN_S" >&5 -echo "${ECHO_T}no, using $LN_S" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no, using $LN_S" >&5 +$as_echo "no, using $LN_S" >&6; } fi @@ -11533,14 +12831,15 @@ + if test "x$ac_cv_env_PKG_CONFIG_set" != "xset"; then if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}pkg-config", so it can be a program name with args. 
set dummy ${ac_tool_prefix}pkg-config; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_path_PKG_CONFIG+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_path_PKG_CONFIG+:} false; then : + $as_echo_n "(cached) " >&6 else case $PKG_CONFIG in [\\/]* | ?:[\\/]*) @@ -11552,14 +12851,14 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_PKG_CONFIG="$as_dir/$ac_word$ac_exec_ext" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS ;; @@ -11567,11 +12866,11 @@ fi PKG_CONFIG=$ac_cv_path_PKG_CONFIG if test -n "$PKG_CONFIG"; then - { echo "$as_me:$LINENO: result: $PKG_CONFIG" >&5 -echo "${ECHO_T}$PKG_CONFIG" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $PKG_CONFIG" >&5 +$as_echo "$PKG_CONFIG" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi @@ -11580,10 +12879,10 @@ ac_pt_PKG_CONFIG=$PKG_CONFIG # Extract the first word of "pkg-config", so it can be a program name with args. set dummy pkg-config; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_path_ac_pt_PKG_CONFIG+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... 
" >&6; } +if ${ac_cv_path_ac_pt_PKG_CONFIG+:} false; then : + $as_echo_n "(cached) " >&6 else case $ac_pt_PKG_CONFIG in [\\/]* | ?:[\\/]*) @@ -11595,14 +12894,14 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_ac_pt_PKG_CONFIG="$as_dir/$ac_word$ac_exec_ext" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS ;; @@ -11610,11 +12909,11 @@ fi ac_pt_PKG_CONFIG=$ac_cv_path_ac_pt_PKG_CONFIG if test -n "$ac_pt_PKG_CONFIG"; then - { echo "$as_me:$LINENO: result: $ac_pt_PKG_CONFIG" >&5 -echo "${ECHO_T}$ac_pt_PKG_CONFIG" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_pt_PKG_CONFIG" >&5 +$as_echo "$ac_pt_PKG_CONFIG" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi if test "x$ac_pt_PKG_CONFIG" = x; then @@ -11622,12 +12921,8 @@ else case $cross_compiling:$ac_tool_warned in yes:) -{ echo "$as_me:$LINENO: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." >&5 -echo "$as_me: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." 
>&2;} +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac PKG_CONFIG=$ac_pt_PKG_CONFIG @@ -11639,14 +12934,14 @@ fi if test -n "$PKG_CONFIG"; then _pkg_min_version=0.9.0 - { echo "$as_me:$LINENO: checking pkg-config is at least version $_pkg_min_version" >&5 -echo $ECHO_N "checking pkg-config is at least version $_pkg_min_version... $ECHO_C" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: checking pkg-config is at least version $_pkg_min_version" >&5 +$as_echo_n "checking pkg-config is at least version $_pkg_min_version... " >&6; } if $PKG_CONFIG --atleast-pkgconfig-version $_pkg_min_version; then - { echo "$as_me:$LINENO: result: yes" >&5 -echo "${ECHO_T}yes" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } PKG_CONFIG="" fi fi @@ -11669,10 +12964,10 @@ do # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. set dummy $ac_tool_prefix$ac_prog; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_CXX+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_CXX+:} false; then : + $as_echo_n "(cached) " >&6 else if test -n "$CXX"; then ac_cv_prog_CXX="$CXX" # Let the user override the test. @@ -11682,25 +12977,25 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CXX="$ac_tool_prefix$ac_prog" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS fi fi CXX=$ac_cv_prog_CXX if test -n "$CXX"; then - { echo "$as_me:$LINENO: result: $CXX" >&5 -echo "${ECHO_T}$CXX" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CXX" >&5 +$as_echo "$CXX" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi @@ -11713,10 +13008,10 @@ do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_prog_ac_ct_CXX+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_ac_ct_CXX+:} false; then : + $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_CXX"; then ac_cv_prog_ac_ct_CXX="$ac_ct_CXX" # Let the user override the test. @@ -11726,25 +13021,25 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CXX="$ac_prog" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS fi fi ac_ct_CXX=$ac_cv_prog_ac_ct_CXX if test -n "$ac_ct_CXX"; then - { echo "$as_me:$LINENO: result: $ac_ct_CXX" >&5 -echo "${ECHO_T}$ac_ct_CXX" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CXX" >&5 +$as_echo "$ac_ct_CXX" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi @@ -11756,64 +13051,47 @@ else case $cross_compiling:$ac_tool_warned in yes:) -{ echo "$as_me:$LINENO: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." >&5 -echo "$as_me: WARNING: In the future, Autoconf will not detect cross-tools -whose name does not start with the host triplet. If you think this -configuration is useful to you, please write to autoconf@gnu.org." >&2;} +{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 +$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac - CXX=$ac_ct_CXX - fi -fi - - fi -fi -# Provide some information about the compiler. 
-echo "$as_me:$LINENO: checking for C++ compiler version" >&5 -ac_compiler=`set X $ac_compile; echo $2` -{ (ac_try="$ac_compiler --version >&5" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compiler --version >&5") 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } -{ (ac_try="$ac_compiler -v >&5" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compiler -v >&5") 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } -{ (ac_try="$ac_compiler -V >&5" + CXX=$ac_ct_CXX + fi +fi + + fi +fi +# Provide some information about the compiler. +$as_echo "$as_me:${as_lineno-$LINENO}: checking for C++ compiler version" >&5 +set X $ac_compile +ac_compiler=$2 +for ac_option in --version -v -V -qversion; do + { { ac_try="$ac_compiler $ac_option >&5" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compiler -V >&5") 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } - -{ echo "$as_me:$LINENO: checking whether we are using the GNU C++ compiler" >&5 -echo $ECHO_N "checking whether we are using the GNU C++ compiler... $ECHO_C" >&6; } -if test "${ac_cv_cxx_compiler_gnu+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" +$as_echo "$ac_try_echo"; } >&5 + (eval "$ac_compiler $ac_option >&5") 2>conftest.err + ac_status=$? + if test -s conftest.err; then + sed '10a\ +... rest of stderr output deleted ... + 10q' conftest.err >conftest.er1 + cat conftest.er1 >&5 + fi + rm -f conftest.er1 conftest.err + $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 + test $ac_status = 0; } +done + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C++ compiler" >&5 +$as_echo_n "checking whether we are using the GNU C++ compiler... " >&6; } +if ${ac_cv_cxx_compiler_gnu+:} false; then : + $as_echo_n "(cached) " >&6 else - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF + cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int @@ -11827,54 +13105,34 @@ return 0; } _ACEOF -rm -f conftest.$ac_objext -if { (ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compile") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! -s conftest.err - } && test -s conftest.$ac_objext; then +if ac_fn_cxx_try_compile "$LINENO"; then : ac_compiler_gnu=yes else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_compiler_gnu=no + ac_compiler_gnu=no fi - rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cv_cxx_compiler_gnu=$ac_compiler_gnu fi -{ echo "$as_me:$LINENO: result: $ac_cv_cxx_compiler_gnu" >&5 -echo "${ECHO_T}$ac_cv_cxx_compiler_gnu" >&6; } -GXX=`test $ac_compiler_gnu = yes && echo yes` +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_cxx_compiler_gnu" >&5 +$as_echo "$ac_cv_cxx_compiler_gnu" >&6; } +if test $ac_compiler_gnu = yes; then + GXX=yes +else + GXX= +fi ac_test_CXXFLAGS=${CXXFLAGS+set} ac_save_CXXFLAGS=$CXXFLAGS -{ echo "$as_me:$LINENO: checking whether $CXX accepts -g" >&5 -echo $ECHO_N "checking whether $CXX accepts -g... 
$ECHO_C" >&6; } -if test "${ac_cv_prog_cxx_g+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CXX accepts -g" >&5 +$as_echo_n "checking whether $CXX accepts -g... " >&6; } +if ${ac_cv_prog_cxx_g+:} false; then : + $as_echo_n "(cached) " >&6 else ac_save_cxx_werror_flag=$ac_cxx_werror_flag ac_cxx_werror_flag=yes ac_cv_prog_cxx_g=no CXXFLAGS="-g" - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF + cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int @@ -11885,34 +13143,11 @@ return 0; } _ACEOF -rm -f conftest.$ac_objext -if { (ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compile") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! -s conftest.err - } && test -s conftest.$ac_objext; then +if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_prog_cxx_g=yes else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - CXXFLAGS="" - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF + CXXFLAGS="" + cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int @@ -11923,35 +13158,12 @@ return 0; } _ACEOF -rm -f conftest.$ac_objext -if { (ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compile") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! -s conftest.err - } && test -s conftest.$ac_objext; then - : -else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 +if ac_fn_cxx_try_compile "$LINENO"; then : - ac_cxx_werror_flag=$ac_save_cxx_werror_flag +else + ac_cxx_werror_flag=$ac_save_cxx_werror_flag CXXFLAGS="-g" - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF + cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int @@ -11962,42 +13174,18 @@ return 0; } _ACEOF -rm -f conftest.$ac_objext -if { (ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compile") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! 
-s conftest.err - } && test -s conftest.$ac_objext; then +if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_prog_cxx_g=yes -else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - fi - rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi - rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi - rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cxx_werror_flag=$ac_save_cxx_werror_flag fi -{ echo "$as_me:$LINENO: result: $ac_cv_prog_cxx_g" >&5 -echo "${ECHO_T}$ac_cv_prog_cxx_g" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cxx_g" >&5 +$as_echo "$ac_cv_prog_cxx_g" >&6; } if test "$ac_test_CXXFLAGS" = set; then CXXFLAGS=$ac_save_CXXFLAGS elif test $ac_cv_prog_cxx_g = yes; then @@ -12021,10 +13209,10 @@ depcc="$CXX" am_compiler_list= -{ echo "$as_me:$LINENO: checking dependency style of $depcc" >&5 -echo $ECHO_N "checking dependency style of $depcc... $ECHO_C" >&6; } -if test "${am_cv_CXX_dependencies_compiler_type+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5 +$as_echo_n "checking dependency style of $depcc... " >&6; } +if ${am_cv_CXX_dependencies_compiler_type+:} false; then : + $as_echo_n "(cached) " >&6 else if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then # We make a subdir and do the tests there. Otherwise we can end up @@ -12032,6 +13220,7 @@ # instance it was reported that on HP-UX the gcc test will end up # making a dummy file named `D' -- because `-MD' means `put the output # in D'. + rm -rf conftest.dir mkdir conftest.dir # Copy depcomp to subdir because otherwise we won't find it if we're # using a relative directory. 
@@ -12049,6 +13238,11 @@ if test "$am_compiler_list" = ""; then am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp` fi + am__universal=false + case " $depcc " in #( + *\ -arch\ *\ -arch\ *) am__universal=true ;; + esac + for depmode in $am_compiler_list; do # Setup a source with many dependencies, because some compilers # like to wrap large dependency lists on column 80 (with \), and @@ -12066,7 +13260,17 @@ done echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf + # We check with `-c' and `-o' for the sake of the "dashmstdout" + # mode. It turns out that the SunPro C++ compiler does not properly + # handle `-M -o', and we need to detect this. Also, some Intel + # versions had trouble with output in subdirs + am__obj=sub/conftest.${OBJEXT-o} + am__minus_obj="-o $am__obj" case $depmode in + gcc) + # This depmode causes a compiler race in universal mode. + test "$am__universal" = false || continue + ;; nosideeffect) # after this tag, mechanisms are not by side-effect, so they'll # only be used when explicitly requested @@ -12076,19 +13280,23 @@ break fi ;; + msvc7 | msvc7msys | msvisualcpp | msvcmsys) + # This compiler won't grok `-c -o', but also, the minuso test has + # not run yet. These depmodes are late enough in the game, and + # so weak that their functioning should not be impacted. + am__obj=conftest.${OBJEXT-o} + am__minus_obj= + ;; none) break ;; esac - # We check with `-c' and `-o' for the sake of the "dashmstdout" - # mode. It turns out that the SunPro C++ compiler does not properly - # handle `-M -o', and we need to detect this. 
if depmode=$depmode \ - source=sub/conftest.c object=sub/conftest.${OBJEXT-o} \ + source=sub/conftest.c object=$am__obj \ depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ - $SHELL ./depcomp $depcc -c -o sub/conftest.${OBJEXT-o} sub/conftest.c \ + $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ >/dev/null 2>conftest.err && grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && - grep sub/conftest.${OBJEXT-o} sub/conftest.Po > /dev/null 2>&1 && + grep $am__obj sub/conftest.Po > /dev/null 2>&1 && ${MAKE-make} -s -f confmf > /dev/null 2>&1; then # icc doesn't choke on unknown options, it will just issue warnings # or remarks (even with -Werror). So we grep stderr for any message @@ -12112,8 +13320,8 @@ fi fi -{ echo "$as_me:$LINENO: result: $am_cv_CXX_dependencies_compiler_type" >&5 -echo "${ECHO_T}$am_cv_CXX_dependencies_compiler_type" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CXX_dependencies_compiler_type" >&5 +$as_echo "$am_cv_CXX_dependencies_compiler_type" >&6; } CXXDEPMODE=depmode=$am_cv_CXX_dependencies_compiler_type if @@ -12127,6 +13335,16 @@ fi + + +func_stripname_cnf () +{ + case ${2} in + .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; + *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; + esac +} # func_stripname_cnf + if test -n "$CXX" && ( test "X$CXX" != "Xno" && ( (test "X$CXX" = "Xg++" && `g++ -v >/dev/null 2>&1` ) || (test "X$CXX" != "Xg++"))) ; then @@ -12135,11 +13353,11 @@ ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu -{ echo "$as_me:$LINENO: checking how to run the C++ preprocessor" >&5 -echo $ECHO_N "checking how to run the C++ preprocessor... 
$ECHO_C" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C++ preprocessor" >&5 +$as_echo_n "checking how to run the C++ preprocessor... " >&6; } if test -z "$CXXCPP"; then - if test "${ac_cv_prog_CXXCPP+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + if ${ac_cv_prog_CXXCPP+:} false; then : + $as_echo_n "(cached) " >&6 else # Double quotes because CXXCPP needs to be expanded for CXXCPP in "$CXX -E" "/lib/cpp" @@ -12153,11 +13371,7 @@ # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF + cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifdef __STDC__ # include @@ -12166,76 +13380,34 @@ #endif Syntax error _ACEOF -if { (ac_try="$ac_cpp conftest.$ac_ext" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_cpp conftest.$ac_ext") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } >/dev/null && { - test -z "$ac_cxx_preproc_warn_flag$ac_cxx_werror_flag" || - test ! -s conftest.err - }; then - : -else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 +if ac_fn_cxx_try_cpp "$LINENO"; then : +else # Broken: fails on valid input. continue fi - -rm -f conftest.err conftest.$ac_ext +rm -f conftest.err conftest.i conftest.$ac_ext # OK, works on sane cases. Now check whether nonexistent headers # can be detected and how. - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF + cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include _ACEOF -if { (ac_try="$ac_cpp conftest.$ac_ext" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_cpp conftest.$ac_ext") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } >/dev/null && { - test -z "$ac_cxx_preproc_warn_flag$ac_cxx_werror_flag" || - test ! -s conftest.err - }; then +if ac_fn_cxx_try_cpp "$LINENO"; then : # Broken: success on invalid input. continue else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - # Passes both tests. ac_preproc_ok=: break fi - -rm -f conftest.err conftest.$ac_ext +rm -f conftest.err conftest.i conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. -rm -f conftest.err conftest.$ac_ext -if $ac_preproc_ok; then +rm -f conftest.i conftest.err conftest.$ac_ext +if $ac_preproc_ok; then : break fi @@ -12247,8 +13419,8 @@ else ac_cv_prog_CXXCPP=$CXXCPP fi -{ echo "$as_me:$LINENO: result: $CXXCPP" >&5 -echo "${ECHO_T}$CXXCPP" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $CXXCPP" >&5 +$as_echo "$CXXCPP" >&6; } ac_preproc_ok=false for ac_cxx_preproc_warn_flag in '' yes do @@ -12258,11 +13430,7 @@ # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF + cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #ifdef __STDC__ # include @@ -12271,83 +13439,40 @@ #endif Syntax error _ACEOF -if { (ac_try="$ac_cpp conftest.$ac_ext" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_cpp conftest.$ac_ext") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } >/dev/null && { - test -z "$ac_cxx_preproc_warn_flag$ac_cxx_werror_flag" || - test ! -s conftest.err - }; then - : -else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 +if ac_fn_cxx_try_cpp "$LINENO"; then : +else # Broken: fails on valid input. continue fi - -rm -f conftest.err conftest.$ac_ext +rm -f conftest.err conftest.i conftest.$ac_ext # OK, works on sane cases. Now check whether nonexistent headers # can be detected and how. - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF + cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF -if { (ac_try="$ac_cpp conftest.$ac_ext" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_cpp conftest.$ac_ext") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } >/dev/null && { - test -z "$ac_cxx_preproc_warn_flag$ac_cxx_werror_flag" || - test ! -s conftest.err - }; then +if ac_fn_cxx_try_cpp "$LINENO"; then : # Broken: success on invalid input. continue else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - # Passes both tests. 
ac_preproc_ok=: break fi - -rm -f conftest.err conftest.$ac_ext +rm -f conftest.err conftest.i conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. -rm -f conftest.err conftest.$ac_ext -if $ac_preproc_ok; then - : +rm -f conftest.i conftest.err conftest.$ac_ext +if $ac_preproc_ok; then : + else - { { echo "$as_me:$LINENO: error: C++ preprocessor \"$CXXCPP\" fails sanity check -See \`config.log' for more details." >&5 -echo "$as_me: error: C++ preprocessor \"$CXXCPP\" fails sanity check -See \`config.log' for more details." >&2;} - { (exit 1); exit 1; }; } + { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 +$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} +as_fn_error $? "C++ preprocessor \"$CXXCPP\" fails sanity check +See \`config.log' for more details" "$LINENO" 5; } fi ac_ext=c @@ -12431,7 +13556,7 @@ echo "$lt_simple_compile_test_code" >conftest.$ac_ext eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err _lt_compiler_boilerplate=`cat conftest.err` -$RM -r conftest* +$RM conftest* ac_outfile=conftest.$ac_objext echo "$lt_simple_link_test_code" >conftest.$ac_ext @@ -12442,6 +13567,7 @@ # Allow CC to be a program name with arguments. lt_save_CC=$CC + lt_save_CFLAGS=$CFLAGS lt_save_LD=$LD lt_save_GCC=$GCC GCC=$GXX @@ -12459,6 +13585,7 @@ fi test -z "${LDCXX+set}" || LD=$LDCXX CC=${CXX-"c++"} + CFLAGS=$CXXFLAGS compiler=$CC compiler_CXX=$CC for cc_temp in $compiler""; do @@ -12487,7 +13614,7 @@ # Check whether --with-gnu-ld was given. -if test "${with_gnu_ld+set}" = set; then +if test "${with_gnu_ld+set}" = set; then : withval=$with_gnu_ld; test "$withval" = no || with_gnu_ld=yes else with_gnu_ld=no @@ -12496,8 +13623,8 @@ ac_prog=ld if test "$GCC" = yes; then # Check if gcc -print-prog-name=ld gives a path. - { echo "$as_me:$LINENO: checking for ld used by $CC" >&5 -echo $ECHO_N "checking for ld used by $CC... 
$ECHO_C" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ld used by $CC" >&5 +$as_echo_n "checking for ld used by $CC... " >&6; } case $host in *-*-mingw*) # gcc leaves a trailing carriage return which upsets mingw @@ -12526,14 +13653,14 @@ ;; esac elif test "$with_gnu_ld" = yes; then - { echo "$as_me:$LINENO: checking for GNU ld" >&5 -echo $ECHO_N "checking for GNU ld... $ECHO_C" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU ld" >&5 +$as_echo_n "checking for GNU ld... " >&6; } else - { echo "$as_me:$LINENO: checking for non-GNU ld" >&5 -echo $ECHO_N "checking for non-GNU ld... $ECHO_C" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for non-GNU ld" >&5 +$as_echo_n "checking for non-GNU ld... " >&6; } fi -if test "${lt_cv_path_LD+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +if ${lt_cv_path_LD+:} false; then : + $as_echo_n "(cached) " >&6 else if test -z "$LD"; then lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR @@ -12563,19 +13690,17 @@ LD="$lt_cv_path_LD" if test -n "$LD"; then - { echo "$as_me:$LINENO: result: $LD" >&5 -echo "${ECHO_T}$LD" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LD" >&5 +$as_echo "$LD" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi -test -z "$LD" && { { echo "$as_me:$LINENO: error: no acceptable ld found in \$PATH" >&5 -echo "$as_me: error: no acceptable ld found in \$PATH" >&2;} - { (exit 1); exit 1; }; } -{ echo "$as_me:$LINENO: checking if the linker ($LD) is GNU ld" >&5 -echo $ECHO_N "checking if the linker ($LD) is GNU ld... $ECHO_C" >&6; } -if test "${lt_cv_prog_gnu_ld+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +test -z "$LD" && as_fn_error $? "no acceptable ld found in \$PATH" "$LINENO" 5 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if the linker ($LD) is GNU ld" >&5 +$as_echo_n "checking if the linker ($LD) is GNU ld... 
" >&6; } +if ${lt_cv_prog_gnu_ld+:} false; then : + $as_echo_n "(cached) " >&6 else # I'd rather use --version here, but apparently some GNU lds only accept -v. case `$LD -v 2>&1 &5 -echo "${ECHO_T}$lt_cv_prog_gnu_ld" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_gnu_ld" >&5 +$as_echo "$lt_cv_prog_gnu_ld" >&6; } with_gnu_ld=$lt_cv_prog_gnu_ld @@ -12600,8 +13725,8 @@ # Check if GNU C++ uses GNU ld as the underlying linker, since the # archiving commands below assume that GNU ld is being used. if test "$with_gnu_ld" = yes; then - archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + archive_cmds_CXX='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' + archive_expsym_cmds_CXX='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' hardcode_libdir_flag_spec_CXX='${wl}-rpath ${wl}$libdir' export_dynamic_flag_spec_CXX='${wl}--export-dynamic' @@ -12642,8 +13767,8 @@ fi # PORTME: fill in a description of your system's C++ link characteristics - { echo "$as_me:$LINENO: checking whether the $compiler linker ($LD) supports shared libraries" >&5 -echo $ECHO_N "checking whether the $compiler linker ($LD) supports shared libraries... $ECHO_C" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5 +$as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... 
" >&6; } ld_shlibs_CXX=yes case $host_os in aix3*) @@ -12743,11 +13868,13 @@ allow_undefined_flag_CXX='-berok' # Determine the default libpath from the value encoded in an empty # executable. - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF + if test "${lt_cv_aix_libpath+set}" = set; then + aix_libpath=$lt_cv_aix_libpath +else + if ${lt_cv_aix_libpath__CXX+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int @@ -12758,47 +13885,31 @@ return 0; } _ACEOF -rm -f conftest.$ac_objext conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! -s conftest.err - } && test -s conftest$ac_exeext && - $as_test_x conftest$ac_exeext; then +if ac_fn_cxx_try_link "$LINENO"; then : -lt_aix_libpath_sed=' - /Import File Strings/,/^$/ { - /^0/ { - s/^0 *\(.*\)$/\1/ - p - } - }' -aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -# Check for a 64-bit object if we didn't find anything. -if test -z "$aix_libpath"; then - aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` + lt_aix_libpath_sed=' + /Import File Strings/,/^$/ { + /^0/ { + s/^0 *\([^ ]*\) *$/\1/ + p + } + }' + lt_cv_aix_libpath__CXX=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` + # Check for a 64-bit object if we didn't find anything. 
+ if test -z "$lt_cv_aix_libpath__CXX"; then + lt_cv_aix_libpath__CXX=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` + fi fi -else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext + if test -z "$lt_cv_aix_libpath__CXX"; then + lt_cv_aix_libpath__CXX="/usr/lib:/lib" + fi fi -rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ - conftest$ac_exeext conftest.$ac_ext -if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + aix_libpath=$lt_cv_aix_libpath__CXX +fi hardcode_libdir_flag_spec_CXX='${wl}-blibpath:$libdir:'"$aix_libpath" @@ -12811,11 +13922,13 @@ else # Determine the default libpath from the value encoded in an # empty executable. - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF + if test "${lt_cv_aix_libpath+set}" = set; then + aix_libpath=$lt_cv_aix_libpath +else + if ${lt_cv_aix_libpath__CXX+:} false; then : + $as_echo_n "(cached) " >&6 +else + cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int @@ -12826,47 +13939,31 @@ return 0; } _ACEOF -rm -f conftest.$ac_objext conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! 
-s conftest.err - } && test -s conftest$ac_exeext && - $as_test_x conftest$ac_exeext; then +if ac_fn_cxx_try_link "$LINENO"; then : -lt_aix_libpath_sed=' - /Import File Strings/,/^$/ { - /^0/ { - s/^0 *\(.*\)$/\1/ - p - } - }' -aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -# Check for a 64-bit object if we didn't find anything. -if test -z "$aix_libpath"; then - aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` + lt_aix_libpath_sed=' + /Import File Strings/,/^$/ { + /^0/ { + s/^0 *\([^ ]*\) *$/\1/ + p + } + }' + lt_cv_aix_libpath__CXX=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` + # Check for a 64-bit object if we didn't find anything. + if test -z "$lt_cv_aix_libpath__CXX"; then + lt_cv_aix_libpath__CXX=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` + fi fi -else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext + if test -z "$lt_cv_aix_libpath__CXX"; then + lt_cv_aix_libpath__CXX="/usr/lib:/lib" + fi fi -rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ - conftest$ac_exeext conftest.$ac_ext -if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + aix_libpath=$lt_cv_aix_libpath__CXX +fi hardcode_libdir_flag_spec_CXX='${wl}-blibpath:$libdir:'"$aix_libpath" # Warning - without using the other run time loading flags, @@ -12909,29 +14006,75 @@ ;; cygwin* | mingw* | pw32* | cegcc*) - # _LT_TAGVAR(hardcode_libdir_flag_spec, CXX) is actually meaningless, - # as there is no search path for DLLs. 
- hardcode_libdir_flag_spec_CXX='-L$libdir' - export_dynamic_flag_spec_CXX='${wl}--export-all-symbols' - allow_undefined_flag_CXX=unsupported - always_export_symbols_CXX=no - enable_shared_with_static_runtimes_CXX=yes - - if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then - archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' - # If the export-symbols file already is a .def file (1st line - # is EXPORTS), use it as is; otherwise, prepend... - archive_expsym_cmds_CXX='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then - cp $export_symbols $output_objdir/$soname.def; - else - echo EXPORTS > $output_objdir/$soname.def; - cat $export_symbols >> $output_objdir/$soname.def; - fi~ - $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' - else - ld_shlibs_CXX=no - fi - ;; + case $GXX,$cc_basename in + ,cl* | no,cl*) + # Native MSVC + # hardcode_libdir_flag_spec is actually meaningless, as there is + # no search path for DLLs. + hardcode_libdir_flag_spec_CXX=' ' + allow_undefined_flag_CXX=unsupported + always_export_symbols_CXX=yes + file_list_spec_CXX='@' + # Tell ltmain to make .lib files, not .a files. + libext=lib + # Tell ltmain to make .dll files, not .so files. + shrext_cmds=".dll" + # FIXME: Setting linknames here is a bad hack. 
+ archive_cmds_CXX='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' + archive_expsym_cmds_CXX='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then + $SED -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; + else + $SED -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; + fi~ + $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ + linknames=' + # The linker will not automatically build a static lib if we build a DLL. + # _LT_TAGVAR(old_archive_from_new_cmds, CXX)='true' + enable_shared_with_static_runtimes_CXX=yes + # Don't use ranlib + old_postinstall_cmds_CXX='chmod 644 $oldlib' + postlink_cmds_CXX='lt_outputfile="@OUTPUT@"~ + lt_tool_outputfile="@TOOL_OUTPUT@"~ + case $lt_outputfile in + *.exe|*.EXE) ;; + *) + lt_outputfile="$lt_outputfile.exe" + lt_tool_outputfile="$lt_tool_outputfile.exe" + ;; + esac~ + func_to_tool_file "$lt_outputfile"~ + if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then + $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; + $RM "$lt_outputfile.manifest"; + fi' + ;; + *) + # g++ + # _LT_TAGVAR(hardcode_libdir_flag_spec, CXX) is actually meaningless, + # as there is no search path for DLLs. 
+ hardcode_libdir_flag_spec_CXX='-L$libdir' + export_dynamic_flag_spec_CXX='${wl}--export-all-symbols' + allow_undefined_flag_CXX=unsupported + always_export_symbols_CXX=no + enable_shared_with_static_runtimes_CXX=yes + + if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then + archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' + # If the export-symbols file already is a .def file (1st line + # is EXPORTS), use it as is; otherwise, prepend... + archive_expsym_cmds_CXX='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then + cp $export_symbols $output_objdir/$soname.def; + else + echo EXPORTS > $output_objdir/$soname.def; + cat $export_symbols >> $output_objdir/$soname.def; + fi~ + $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' + else + ld_shlibs_CXX=no + fi + ;; + esac + ;; darwin* | rhapsody*) @@ -13037,7 +14180,7 @@ ;; *) if test "$GXX" = yes; then - archive_cmds_CXX='$RM $output_objdir/$soname~$CC -shared -nostdlib -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + archive_cmds_CXX='$RM $output_objdir/$soname~$CC -shared -nostdlib $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' else # FIXME: insert proper C++ library support ld_shlibs_CXX=no @@ -13108,10 +14251,10 @@ archive_cmds_CXX='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; ia64*) - archive_cmds_CXX='$CC -shared 
-nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' + archive_cmds_CXX='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; *) - archive_cmds_CXX='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' + archive_cmds_CXX='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; esac fi @@ -13152,9 +14295,9 @@ *) if test "$GXX" = yes; then if test "$with_gnu_ld" = no; then - archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' else - archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib' + archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib' fi fi link_all_deplibs_CXX=yes @@ -13432,7 +14575,7 @@ archive_cmds_CXX='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test 
-n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ;; *) - archive_cmds_CXX='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + archive_cmds_CXX='$CC -shared $pic_flag -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ;; esac @@ -13519,9 +14662,9 @@ if test "$GXX" = yes && test "$with_gnu_ld" = no; then no_undefined_flag_CXX=' ${wl}-z ${wl}defs' if $CC --version | $GREP -v '^2\.7' > /dev/null; then - archive_cmds_CXX='$CC -shared -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' + archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' archive_expsym_cmds_CXX='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ - $CC -shared -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' + $CC -shared $pic_flag -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when @@ -13628,8 +14771,8 @@ ;; esac - { echo "$as_me:$LINENO: result: $ld_shlibs_CXX" >&5 -echo "${ECHO_T}$ld_shlibs_CXX" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs_CXX" 
>&5 +$as_echo "$ld_shlibs_CXX" >&6; } test "$ld_shlibs_CXX" = no && can_build_shared=no GCC_CXX="$GXX" @@ -13656,11 +14799,18 @@ }; _LT_EOF -if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 + +_lt_libdeps_save_CFLAGS=$CFLAGS +case "$CC $CFLAGS " in #( +*\ -flto*\ *) CFLAGS="$CFLAGS -fno-lto" ;; +*\ -fwhopr*\ *) CFLAGS="$CFLAGS -fno-whopr" ;; +esac + +if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); }; then + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then # Parse the compiler output and extract the necessary # objects, libraries and library flags. @@ -13669,7 +14819,7 @@ pre_test_object_deps_done=no for p in `eval "$output_verbose_link_cmd"`; do - case $p in + case ${prev}${p} in -L* | -R* | -l*) # Some compilers place space between "-{L,R}" and the path. @@ -13678,13 +14828,22 @@ test $p = "-R"; then prev=$p continue - else - prev= fi + # Expand the sysroot to ease extracting the directories later. + if test -z "$prev"; then + case $p in + -L*) func_stripname_cnf '-L' '' "$p"; prev=-L; p=$func_stripname_result ;; + -R*) func_stripname_cnf '-R' '' "$p"; prev=-R; p=$func_stripname_result ;; + -l*) func_stripname_cnf '-l' '' "$p"; prev=-l; p=$func_stripname_result ;; + esac + fi + case $p in + =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;; + esac if test "$pre_test_object_deps_done" = no; then - case $p in - -L* | -R*) + case ${prev} in + -L | -R) # Internal compiler library paths should come after those # provided the user. The postdeps already come after the # user supplied libs so there is no need to process them. @@ -13704,8 +14863,10 @@ postdeps_CXX="${postdeps_CXX} ${prev}${p}" fi fi + prev= ;; + *.lto.$objext) ;; # Ignore GCC LTO objects *.$objext) # This assumes that the test object file only shows up # once in the compiler output. 
@@ -13741,6 +14902,7 @@ fi $RM -f confest.$objext +CFLAGS=$_lt_libdeps_save_CFLAGS # PORTME: override above test on systems where it is broken case $host_os in @@ -13841,8 +15003,6 @@ lt_prog_compiler_pic_CXX= lt_prog_compiler_static_CXX= -{ echo "$as_me:$LINENO: checking for $compiler option to produce PIC" >&5 -echo $ECHO_N "checking for $compiler option to produce PIC... $ECHO_C" >&6; } # C++ specific cases for pic, static, wl, etc. if test "$GXX" = yes; then @@ -13946,6 +15106,11 @@ ;; esac ;; + mingw* | cygwin* | os2* | pw32* | cegcc*) + # This hack is so that the source file can tell whether it is being + # built for inclusion in a dll (and should export symbols for example). + lt_prog_compiler_pic_CXX='-DDLL_EXPORT' + ;; dgux*) case $cc_basename in ec++*) @@ -14163,19 +15328,26 @@ lt_prog_compiler_pic_CXX="$lt_prog_compiler_pic_CXX -DPIC" ;; esac -{ echo "$as_me:$LINENO: result: $lt_prog_compiler_pic_CXX" >&5 -echo "${ECHO_T}$lt_prog_compiler_pic_CXX" >&6; } - +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 +$as_echo_n "checking for $compiler option to produce PIC... " >&6; } +if ${lt_cv_prog_compiler_pic_CXX+:} false; then : + $as_echo_n "(cached) " >&6 +else + lt_cv_prog_compiler_pic_CXX=$lt_prog_compiler_pic_CXX +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_CXX" >&5 +$as_echo "$lt_cv_prog_compiler_pic_CXX" >&6; } +lt_prog_compiler_pic_CXX=$lt_cv_prog_compiler_pic_CXX # # Check to make sure the PIC flag actually works. # if test -n "$lt_prog_compiler_pic_CXX"; then - { echo "$as_me:$LINENO: checking if $compiler PIC flag $lt_prog_compiler_pic_CXX works" >&5 -echo $ECHO_N "checking if $compiler PIC flag $lt_prog_compiler_pic_CXX works... 
$ECHO_C" >&6; } -if test "${lt_cv_prog_compiler_pic_works_CXX+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler PIC flag $lt_prog_compiler_pic_CXX works" >&5 +$as_echo_n "checking if $compiler PIC flag $lt_prog_compiler_pic_CXX works... " >&6; } +if ${lt_cv_prog_compiler_pic_works_CXX+:} false; then : + $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_pic_works_CXX=no ac_outfile=conftest.$ac_objext @@ -14204,11 +15376,11 @@ lt_cv_prog_compiler_pic_works_CXX=yes fi fi - $RM -r conftest* + $RM conftest* fi -{ echo "$as_me:$LINENO: result: $lt_cv_prog_compiler_pic_works_CXX" >&5 -echo "${ECHO_T}$lt_cv_prog_compiler_pic_works_CXX" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works_CXX" >&5 +$as_echo "$lt_cv_prog_compiler_pic_works_CXX" >&6; } if test x"$lt_cv_prog_compiler_pic_works_CXX" = xyes; then case $lt_prog_compiler_pic_CXX in @@ -14224,14 +15396,16 @@ + + # # Check to make sure the static flag actually works. # wl=$lt_prog_compiler_wl_CXX eval lt_tmp_static_flag=\"$lt_prog_compiler_static_CXX\" -{ echo "$as_me:$LINENO: checking if $compiler static flag $lt_tmp_static_flag works" >&5 -echo $ECHO_N "checking if $compiler static flag $lt_tmp_static_flag works... $ECHO_C" >&6; } -if test "${lt_cv_prog_compiler_static_works_CXX+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler static flag $lt_tmp_static_flag works" >&5 +$as_echo_n "checking if $compiler static flag $lt_tmp_static_flag works... 
" >&6; } +if ${lt_cv_prog_compiler_static_works_CXX+:} false; then : + $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_static_works_CXX=no save_LDFLAGS="$LDFLAGS" @@ -14256,8 +15430,8 @@ LDFLAGS="$save_LDFLAGS" fi -{ echo "$as_me:$LINENO: result: $lt_cv_prog_compiler_static_works_CXX" >&5 -echo "${ECHO_T}$lt_cv_prog_compiler_static_works_CXX" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works_CXX" >&5 +$as_echo "$lt_cv_prog_compiler_static_works_CXX" >&6; } if test x"$lt_cv_prog_compiler_static_works_CXX" = xyes; then : @@ -14268,10 +15442,10 @@ - { echo "$as_me:$LINENO: checking if $compiler supports -c -o file.$ac_objext" >&5 -echo $ECHO_N "checking if $compiler supports -c -o file.$ac_objext... $ECHO_C" >&6; } -if test "${lt_cv_prog_compiler_c_o_CXX+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 +$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } +if ${lt_cv_prog_compiler_c_o_CXX+:} false; then : + $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_c_o_CXX=no $RM -r conftest 2>/dev/null @@ -14305,25 +15479,25 @@ fi fi chmod u+w . 2>&5 - $RM -r conftest* + $RM conftest* # SGI C++ compiler will create directory out/ii_files/ for # template instantiation test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files $RM out/* && rmdir out cd .. $RM -r conftest - $RM -r conftest* + $RM conftest* fi -{ echo "$as_me:$LINENO: result: $lt_cv_prog_compiler_c_o_CXX" >&5 -echo "${ECHO_T}$lt_cv_prog_compiler_c_o_CXX" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o_CXX" >&5 +$as_echo "$lt_cv_prog_compiler_c_o_CXX" >&6; } - { echo "$as_me:$LINENO: checking if $compiler supports -c -o file.$ac_objext" >&5 -echo $ECHO_N "checking if $compiler supports -c -o file.$ac_objext... 
$ECHO_C" >&6; } -if test "${lt_cv_prog_compiler_c_o_CXX+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 +$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } +if ${lt_cv_prog_compiler_c_o_CXX+:} false; then : + $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_c_o_CXX=no $RM -r conftest 2>/dev/null @@ -14357,18 +15531,18 @@ fi fi chmod u+w . 2>&5 - $RM -r conftest* + $RM conftest* # SGI C++ compiler will create directory out/ii_files/ for # template instantiation test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files $RM out/* && rmdir out cd .. $RM -r conftest - $RM -r conftest* + $RM conftest* fi -{ echo "$as_me:$LINENO: result: $lt_cv_prog_compiler_c_o_CXX" >&5 -echo "${ECHO_T}$lt_cv_prog_compiler_c_o_CXX" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o_CXX" >&5 +$as_echo "$lt_cv_prog_compiler_c_o_CXX" >&6; } @@ -14376,19 +15550,19 @@ hard_links="nottested" if test "$lt_cv_prog_compiler_c_o_CXX" = no && test "$need_locks" != no; then # do not overwrite the value of need_locks provided by the user - { echo "$as_me:$LINENO: checking if we can lock with hard links" >&5 -echo $ECHO_N "checking if we can lock with hard links... $ECHO_C" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5 +$as_echo_n "checking if we can lock with hard links... 
" >&6; } hard_links=yes - $RM -r conftest* + $RM conftest* ln conftest.a conftest.b 2>/dev/null && hard_links=no touch conftest.a ln conftest.a conftest.b 2>&5 || hard_links=no ln conftest.a conftest.b 2>/dev/null && hard_links=no - { echo "$as_me:$LINENO: result: $hard_links" >&5 -echo "${ECHO_T}$hard_links" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5 +$as_echo "$hard_links" >&6; } if test "$hard_links" = no; then - { echo "$as_me:$LINENO: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5 -echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5 +$as_echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;} need_locks=warn fi else @@ -14397,10 +15571,11 @@ - { echo "$as_me:$LINENO: checking whether the $compiler linker ($LD) supports shared libraries" >&5 -echo $ECHO_N "checking whether the $compiler linker ($LD) supports shared libraries... $ECHO_C" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5 +$as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... " >&6; } export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' + exclude_expsyms_CXX='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*' case $host_os in aix[4-9]*) # If we're using GNU nm, then we don't want the "-C" option. 
@@ -14415,18 +15590,23 @@ ;; pw32*) export_symbols_cmds_CXX="$ltdll_cmds" - ;; + ;; cygwin* | mingw* | cegcc*) - export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;/^.*[ ]__nm__/s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' - ;; + case $cc_basename in + cl*) ;; + *) + export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' + exclude_expsyms_CXX='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' + ;; + esac + ;; *) export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' - ;; + ;; esac - exclude_expsyms_CXX='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*' -{ echo "$as_me:$LINENO: result: $ld_shlibs_CXX" >&5 -echo "${ECHO_T}$ld_shlibs_CXX" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs_CXX" >&5 +$as_echo "$ld_shlibs_CXX" >&6; } test "$ld_shlibs_CXX" = no && can_build_shared=no with_gnu_ld_CXX=$with_gnu_ld @@ -14453,19 +15633,19 @@ # Test whether the compiler implicitly links with -lc since on some # systems, -lgcc has to come before -lc. If gcc already passes -lc # to ld, don't add -lc before -lgcc. - { echo "$as_me:$LINENO: checking whether -lc should be explicitly linked in" >&5 -echo $ECHO_N "checking whether -lc should be explicitly linked in... $ECHO_C" >&6; } -if test "${lt_cv_archive_cmds_need_lc_CXX+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether -lc should be explicitly linked in" >&5 +$as_echo_n "checking whether -lc should be explicitly linked in... 
" >&6; } +if ${lt_cv_archive_cmds_need_lc_CXX+:} false; then : + $as_echo_n "(cached) " >&6 else - $RM -r conftest* + $RM conftest* echo "$lt_simple_compile_test_code" > conftest.$ac_ext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } 2>conftest.err; then + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } 2>conftest.err; then soname=conftest lib=conftest libobjs=conftest.$ac_objext @@ -14479,11 +15659,11 @@ libname=conftest lt_save_allow_undefined_flag=$allow_undefined_flag_CXX allow_undefined_flag_CXX= - if { (eval echo "$as_me:$LINENO: \"$archive_cmds_CXX 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1\"") >&5 + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$archive_cmds_CXX 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1\""; } >&5 (eval $archive_cmds_CXX 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) 2>&5 ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; } then lt_cv_archive_cmds_need_lc_CXX=no else @@ -14493,11 +15673,11 @@ else cat conftest.err 1>&5 fi - $RM -r conftest* + $RM conftest* fi -{ echo "$as_me:$LINENO: result: $lt_cv_archive_cmds_need_lc_CXX" >&5 -echo "${ECHO_T}$lt_cv_archive_cmds_need_lc_CXX" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_archive_cmds_need_lc_CXX" >&5 +$as_echo "$lt_cv_archive_cmds_need_lc_CXX" >&6; } archive_cmds_need_lc_CXX=$lt_cv_archive_cmds_need_lc_CXX ;; esac @@ -14568,8 +15748,8 @@ - { echo "$as_me:$LINENO: checking dynamic linker characteristics" >&5 -echo $ECHO_N "checking dynamic linker characteristics... 
$ECHO_C" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: checking dynamic linker characteristics" >&5 +$as_echo_n "checking dynamic linker characteristics... " >&6; } library_names_spec= libname_spec='lib$name' @@ -14686,8 +15866,9 @@ need_version=no need_lib_prefix=no - case $GCC,$host_os in - yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*) + case $GCC,$cc_basename in + yes,*) + # gcc library_names_spec='$libname.dll.a' # DLL is installed to $(libdir)/../bin by postinstall_cmds postinstall_cmds='base_file=`basename \${file}`~ @@ -14719,13 +15900,71 @@ library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' ;; esac + dynamic_linker='Win32 ld.exe' + ;; + + *,cl*) + # Native MSVC + libname_spec='$name' + soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' + library_names_spec='${libname}.dll.lib' + + case $build_os in + mingw*) + sys_lib_search_path_spec= + lt_save_ifs=$IFS + IFS=';' + for lt_path in $LIB + do + IFS=$lt_save_ifs + # Let DOS variable expansion print the short 8.3 style file name. + lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` + sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" + done + IFS=$lt_save_ifs + # Convert to MSYS style. + sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'` + ;; + cygwin*) + # Convert to unix form, then to dos form, then back to unix form + # but this time dos style (no spaces!) so that the unix form looks + # like /cygdrive/c/PROGRA~1:/cygdr... 
+ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` + sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` + sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` + ;; + *) + sys_lib_search_path_spec="$LIB" + if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then + # It is most probably a Windows format PATH. + sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` + else + sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` + fi + # FIXME: find the short name or the path components, as spaces are + # common. (e.g. "Program Files" -> "PROGRA~1") + ;; + esac + + # DLL is installed to $(libdir)/../bin by postinstall_cmds + postinstall_cmds='base_file=`basename \${file}`~ + dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ + dldir=$destdir/`dirname \$dlpath`~ + test -d \$dldir || mkdir -p \$dldir~ + $install_prog $dir/$dlname \$dldir/$dlname' + postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ + dlpath=$dir/\$dldll~ + $RM \$dlpath' + shlibpath_overrides_runpath=yes + dynamic_linker='Win32 link.exe' ;; *) + # Assume MSVC wrapper library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' + dynamic_linker='Win32 ld.exe' ;; esac - dynamic_linker='Win32 ld.exe' # FIXME: first we should search . 
and the directory the executable is in shlibpath_var=PATH ;; @@ -14735,7 +15974,7 @@ version_type=darwin need_lib_prefix=no need_version=no - library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext ${libname}${release}${versuffix}$shared_ext' + library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext' soname_spec='${libname}${release}${major}$shared_ext' shlibpath_overrides_runpath=yes shlibpath_var=DYLD_LIBRARY_PATH @@ -14768,7 +16007,14 @@ *) objformat=elf ;; esac fi - version_type=freebsd-$objformat + # Handle Gentoo/FreeBSD as it was Linux + case $host_vendor in + gentoo) + version_type=linux ;; + *) + version_type=freebsd-$objformat ;; + esac + case $version_type in freebsd-elf*) library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' @@ -14779,6 +16025,12 @@ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix' need_version=yes ;; + linux) + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + need_lib_prefix=no + need_version=no + ;; esac shlibpath_var=LD_LIBRARY_PATH case $host_os in @@ -14938,19 +16190,15 @@ shlibpath_overrides_runpath=no # Some binutils ld are patched to set DT_RUNPATH - if test "${lt_cv_shlibpath_overrides_runpath+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + if ${lt_cv_shlibpath_overrides_runpath+:} false; then : + $as_echo_n "(cached) " >&6 else lt_cv_shlibpath_overrides_runpath=no save_LDFLAGS=$LDFLAGS save_libdir=$libdir eval "libdir=/foo; wl=\"$lt_prog_compiler_wl_CXX\"; \ LDFLAGS=\"\$LDFLAGS $hardcode_libdir_flag_spec_CXX\"" - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF + cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int @@ -14961,37 +16209,13 @@ return 0; } _ACEOF -rm -f conftest.$ac_objext conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! -s conftest.err - } && test -s conftest$ac_exeext && - $as_test_x conftest$ac_exeext; then - if ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null; then +if ac_fn_cxx_try_link "$LINENO"; then : + if ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null; then : lt_cv_shlibpath_overrides_runpath=yes fi - -else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - fi - -rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ - conftest$ac_exeext conftest.$ac_ext +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext LDFLAGS=$save_LDFLAGS libdir=$save_libdir @@ -15207,8 +16431,8 @@ dynamic_linker=no ;; esac -{ echo "$as_me:$LINENO: result: $dynamic_linker" >&5 -echo "${ECHO_T}$dynamic_linker" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5 +$as_echo "$dynamic_linker" >&6; } test "$dynamic_linker" = no && can_build_shared=no variables_saved_for_relink="PATH $shlibpath_var $runpath_var" @@ -15260,8 +16484,8 @@ - { echo "$as_me:$LINENO: checking how to hardcode library paths into programs" >&5 -echo $ECHO_N "checking how to hardcode library paths into programs... $ECHO_C" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to hardcode library paths into programs" >&5 +$as_echo_n "checking how to hardcode library paths into programs... 
" >&6; } hardcode_action_CXX= if test -n "$hardcode_libdir_flag_spec_CXX" || test -n "$runpath_var_CXX" || @@ -15285,8 +16509,8 @@ # directories. hardcode_action_CXX=unsupported fi -{ echo "$as_me:$LINENO: result: $hardcode_action_CXX" >&5 -echo "${ECHO_T}$hardcode_action_CXX" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action_CXX" >&5 +$as_echo "$hardcode_action_CXX" >&6; } if test "$hardcode_action_CXX" = relink || test "$inherit_rpath_CXX" = yes; then @@ -15307,6 +16531,7 @@ fi # test -n "$compiler" CC=$lt_save_CC + CFLAGS=$lt_save_CFLAGS LDCXX=$LD LD=$lt_save_LD GCC=$lt_save_GCC @@ -15330,1096 +16555,511 @@ ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu -{ echo "$as_me:$LINENO: checking whether byte ordering is bigendian" >&5 -echo $ECHO_N "checking whether byte ordering is bigendian... $ECHO_C" >&6; } -if test "${ac_cv_c_bigendian+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 -else - # See if sys/param.h defines the BYTE_ORDER macro. -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF +cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ -#include -#include int main () { -#if ! (defined BYTE_ORDER && defined BIG_ENDIAN && defined LITTLE_ENDIAN \ - && BYTE_ORDER && BIG_ENDIAN && LITTLE_ENDIAN) - bogus endian macros -#endif ; return 0; } _ACEOF -rm -f conftest.$ac_objext -if { (ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compile") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! 
-s conftest.err - } && test -s conftest.$ac_objext; then - # It does; now see whether it defined to BIG_ENDIAN or not. -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF -/* end confdefs.h. */ -#include -#include - -int -main () -{ -#if BYTE_ORDER != BIG_ENDIAN - not big endian -#endif +if ac_fn_cxx_try_link "$LINENO"; then : - ; - return 0; -} -_ACEOF -rm -f conftest.$ac_objext -if { (ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compile") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! -s conftest.err - } && test -s conftest.$ac_objext; then - ac_cv_c_bigendian=yes else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_cv_c_bigendian=no + as_fn_error $? "A C++ compiler is required." "$LINENO" 5 fi - -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether byte ordering is bigendian" >&5 +$as_echo_n "checking whether byte ordering is bigendian... " >&6; } +if ${ac_cv_c_bigendian+:} false; then : + $as_echo_n "(cached) " >&6 else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - # It does not; compile a test program. -if test "$cross_compiling" = yes; then - # try to guess the endianness by grepping values into an object file ac_cv_c_bigendian=unknown - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF -/* end confdefs.h. 
*/ -short int ascii_mm[] = { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 }; -short int ascii_ii[] = { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 }; -void _ascii () { char *s = (char *) ascii_mm; s = (char *) ascii_ii; } -short int ebcdic_ii[] = { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 }; -short int ebcdic_mm[] = { 0xC2C9, 0xC785, 0x95C4, 0x8981, 0x95E2, 0xA8E2, 0 }; -void _ebcdic () { char *s = (char *) ebcdic_mm; s = (char *) ebcdic_ii; } -int -main () -{ - _ascii (); _ebcdic (); - ; - return 0; -} -_ACEOF -rm -f conftest.$ac_objext -if { (ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compile") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! -s conftest.err - } && test -s conftest.$ac_objext; then - if grep BIGenDianSyS conftest.$ac_objext >/dev/null ; then - ac_cv_c_bigendian=yes -fi -if grep LiTTleEnDian conftest.$ac_objext >/dev/null ; then - if test "$ac_cv_c_bigendian" = unknown; then - ac_cv_c_bigendian=no - else - # finding both strings is unlikely to happen, but who knows? - ac_cv_c_bigendian=unknown - fi -fi -else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - -fi - -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -else - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF -/* end confdefs.h. */ -$ac_includes_default -int -main () -{ - - /* Are we little or big endian? From Harbison&Steele. 
*/ - union - { - long int l; - char c[sizeof (long int)]; - } u; - u.l = 1; - return u.c[sizeof (long int) - 1] == 1; - - ; - return 0; -} -_ACEOF -rm -f conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { ac_try='./conftest$ac_exeext' - { (case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); }; }; then - ac_cv_c_bigendian=no -else - echo "$as_me: program exited with status $ac_status" >&5 -echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - -( exit $ac_status ) -ac_cv_c_bigendian=yes -fi -rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext conftest.$ac_objext conftest.$ac_ext -fi - - -fi - -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -{ echo "$as_me:$LINENO: result: $ac_cv_c_bigendian" >&5 -echo "${ECHO_T}$ac_cv_c_bigendian" >&6; } -case $ac_cv_c_bigendian in - yes) - -cat >>confdefs.h <<\_ACEOF -#define WORDS_BIGENDIAN 1 -_ACEOF - ;; - no) - ;; - *) - { { echo "$as_me:$LINENO: error: unknown endianness -presetting ac_cv_c_bigendian=no (or yes) will help" >&5 -echo "$as_me: error: unknown endianness -presetting ac_cv_c_bigendian=no (or yes) will help" >&2;} - { (exit 1); exit 1; }; } ;; -esac - - - - - - -for ac_header in stdint.h stddef.h sys/mman.h sys/resource.h -do -as_ac_Header=`echo "ac_cv_header_$ac_header" | $as_tr_sh` -if { as_var=$as_ac_Header; eval "test \"\${$as_var+set}\" = set"; }; then - { echo "$as_me:$LINENO: checking for $ac_header" >&5 -echo $ECHO_N "checking for $ac_header... 
$ECHO_C" >&6; } -if { as_var=$as_ac_Header; eval "test \"\${$as_var+set}\" = set"; }; then - echo $ECHO_N "(cached) $ECHO_C" >&6 -fi -ac_res=`eval echo '${'$as_ac_Header'}'` - { echo "$as_me:$LINENO: result: $ac_res" >&5 -echo "${ECHO_T}$ac_res" >&6; } -else - # Is the header compilable? -{ echo "$as_me:$LINENO: checking $ac_header usability" >&5 -echo $ECHO_N "checking $ac_header usability... $ECHO_C" >&6; } -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF + # See if we're dealing with a universal compiler. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ -$ac_includes_default -#include <$ac_header> -_ACEOF -rm -f conftest.$ac_objext -if { (ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compile") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! -s conftest.err - } && test -s conftest.$ac_objext; then - ac_header_compiler=yes -else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_header_compiler=no +#ifndef __APPLE_CC__ + not a universal capable compiler + #endif + typedef int dummy; + +_ACEOF +if ac_fn_cxx_try_compile "$LINENO"; then : + + # Check for potential -arch flags. It is not universal unless + # there are at least two -arch flags with different values. 
+ ac_arch= + ac_prev= + for ac_word in $CC $CFLAGS $CPPFLAGS $LDFLAGS; do + if test -n "$ac_prev"; then + case $ac_word in + i?86 | x86_64 | ppc | ppc64) + if test -z "$ac_arch" || test "$ac_arch" = "$ac_word"; then + ac_arch=$ac_word + else + ac_cv_c_bigendian=universal + break + fi + ;; + esac + ac_prev= + elif test "x$ac_word" = "x-arch"; then + ac_prev=arch + fi + done fi - rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -{ echo "$as_me:$LINENO: result: $ac_header_compiler" >&5 -echo "${ECHO_T}$ac_header_compiler" >&6; } - -# Is the header present? -{ echo "$as_me:$LINENO: checking $ac_header presence" >&5 -echo $ECHO_N "checking $ac_header presence... $ECHO_C" >&6; } -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF -/* end confdefs.h. */ -#include <$ac_header> -_ACEOF -if { (ac_try="$ac_cpp conftest.$ac_ext" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_cpp conftest.$ac_ext") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } >/dev/null && { - test -z "$ac_cxx_preproc_warn_flag$ac_cxx_werror_flag" || - test ! -s conftest.err - }; then - ac_header_preproc=yes -else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_header_preproc=no -fi - -rm -f conftest.err conftest.$ac_ext -{ echo "$as_me:$LINENO: result: $ac_header_preproc" >&5 -echo "${ECHO_T}$ac_header_preproc" >&6; } - -# So? What about this header? -case $ac_header_compiler:$ac_header_preproc:$ac_cxx_preproc_warn_flag in - yes:no: ) - { echo "$as_me:$LINENO: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&5 -echo "$as_me: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" 
>&2;} - { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the compiler's result" >&5 -echo "$as_me: WARNING: $ac_header: proceeding with the compiler's result" >&2;} - ac_header_preproc=yes - ;; - no:yes:* ) - { echo "$as_me:$LINENO: WARNING: $ac_header: present but cannot be compiled" >&5 -echo "$as_me: WARNING: $ac_header: present but cannot be compiled" >&2;} - { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 -echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} - { echo "$as_me:$LINENO: WARNING: $ac_header: see the Autoconf documentation" >&5 -echo "$as_me: WARNING: $ac_header: see the Autoconf documentation" >&2;} - { echo "$as_me:$LINENO: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&5 -echo "$as_me: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&2;} - { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 -echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} - { echo "$as_me:$LINENO: WARNING: $ac_header: in the future, the compiler will take precedence" >&5 -echo "$as_me: WARNING: $ac_header: in the future, the compiler will take precedence" >&2;} - ( cat <<\_ASBOX -## ------------------------------------------------------------ ## -## Report this to https://issues.apache.org/jira/browse/COUCHDB ## -## ------------------------------------------------------------ ## -_ASBOX - ) | sed "s/^/$as_me: WARNING: /" >&2 - ;; -esac -{ echo "$as_me:$LINENO: checking for $ac_header" >&5 -echo $ECHO_N "checking for $ac_header... 
$ECHO_C" >&6; } -if { as_var=$as_ac_Header; eval "test \"\${$as_var+set}\" = set"; }; then - echo $ECHO_N "(cached) $ECHO_C" >&6 -else - eval "$as_ac_Header=\$ac_header_preproc" -fi -ac_res=`eval echo '${'$as_ac_Header'}'` - { echo "$as_me:$LINENO: result: $ac_res" >&5 -echo "${ECHO_T}$ac_res" >&6; } - -fi -if test `eval echo '${'$as_ac_Header'}'` = yes; then - cat >>confdefs.h <<_ACEOF -#define `echo "HAVE_$ac_header" | $as_tr_cpp` 1 -_ACEOF - -fi - -done - -{ echo "$as_me:$LINENO: checking for mmap" >&5 -echo $ECHO_N "checking for mmap... $ECHO_C" >&6; } -if test "${ac_cv_func_mmap+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 -else - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF + if test $ac_cv_c_bigendian = unknown; then + # See if sys/param.h defines the BYTE_ORDER macro. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ -/* Define mmap to an innocuous variant, in case declares mmap. - For example, HP-UX 11i declares gettimeofday. */ -#define mmap innocuous_mmap - -/* System header to define __stub macros and hopefully few prototypes, - which can conflict with char mmap (); below. - Prefer to if __STDC__ is defined, since - exists even on freestanding compilers. */ - -#ifdef __STDC__ -# include -#else -# include -#endif - -#undef mmap - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. */ -#ifdef __cplusplus -extern "C" -#endif -char mmap (); -/* The GNU C library defines this for functions which it implements - to always fail with ENOSYS. Some functions are actually named - something starting with __ and the normal name is an alias. */ -#if defined __stub_mmap || defined __stub___mmap -choke me -#endif +#include + #include int main () { -return mmap (); +#if ! 
(defined BYTE_ORDER && defined BIG_ENDIAN \ + && defined LITTLE_ENDIAN && BYTE_ORDER && BIG_ENDIAN \ + && LITTLE_ENDIAN) + bogus endian macros + #endif + ; return 0; } _ACEOF -rm -f conftest.$ac_objext conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! -s conftest.err - } && test -s conftest$ac_exeext && - $as_test_x conftest$ac_exeext; then - ac_cv_func_mmap=yes -else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_cv_func_mmap=no -fi - -rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ - conftest$ac_exeext conftest.$ac_ext -fi -{ echo "$as_me:$LINENO: result: $ac_cv_func_mmap" >&5 -echo "${ECHO_T}$ac_cv_func_mmap" >&6; } - - -{ echo "$as_me:$LINENO: checking if the compiler supports __builtin_expect" >&5 -echo $ECHO_N "checking if the compiler supports __builtin_expect... $ECHO_C" >&6; } - -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF +if ac_fn_cxx_try_compile "$LINENO"; then : + # It does; now see whether it defined to BIG_ENDIAN or not. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ +#include + #include int main () { - - return __builtin_expect(1, 1) ? 1 : 0 +#if BYTE_ORDER != BIG_ENDIAN + not big endian + #endif ; return 0; } _ACEOF -rm -f conftest.$ac_objext -if { (ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compile") 2>conftest.er1 - ac_status=$? 
- grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! -s conftest.err - } && test -s conftest.$ac_objext; then - - snappy_have_builtin_expect=yes - { echo "$as_me:$LINENO: result: yes" >&5 -echo "${ECHO_T}yes" >&6; } - -else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - - snappy_have_builtin_expect=no - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } - +if ac_fn_cxx_try_compile "$LINENO"; then : + ac_cv_c_bigendian=yes +else + ac_cv_c_bigendian=no fi - rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -if test x$snappy_have_builtin_expect = xyes ; then - -cat >>confdefs.h <<\_ACEOF -#define HAVE_BUILTIN_EXPECT 1 -_ACEOF - fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + fi + if test $ac_cv_c_bigendian = unknown; then + # See if defines _LITTLE_ENDIAN or _BIG_ENDIAN (e.g., Solaris). + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +#include -{ echo "$as_me:$LINENO: checking if the compiler supports __builtin_ctzll" >&5 -echo $ECHO_N "checking if the compiler supports __builtin_ctzll... $ECHO_C" >&6; } +int +main () +{ +#if ! (defined _LITTLE_ENDIAN || defined _BIG_ENDIAN) + bogus endian macros + #endif -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ + ; + return 0; +} _ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF +if ac_fn_cxx_try_compile "$LINENO"; then : + # It does; now see whether it defined to _BIG_ENDIAN or not. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ +#include int main () { - - return (__builtin_ctzll(0x100000000LL) == 32) ? 
1 : 0 +#ifndef _BIG_ENDIAN + not big endian + #endif ; return 0; } _ACEOF -rm -f conftest.$ac_objext -if { (ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compile") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! -s conftest.err - } && test -s conftest.$ac_objext; then - - snappy_have_builtin_ctz=yes - { echo "$as_me:$LINENO: result: yes" >&5 -echo "${ECHO_T}yes" >&6; } - +if ac_fn_cxx_try_compile "$LINENO"; then : + ac_cv_c_bigendian=yes else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - - snappy_have_builtin_ctz=no - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } - + ac_cv_c_bigendian=no fi - rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -if test x$snappy_have_builtin_ctz = xyes ; then - -cat >>confdefs.h <<\_ACEOF -#define HAVE_BUILTIN_CTZ 1 -_ACEOF - -fi - -if test "$ac_cv_header_stdint_h" = "yes"; then - ac_cv_have_stdint_h=1 - -else - ac_cv_have_stdint_h=0 - -fi -if test "$ac_cv_header_stddef_h" = "yes"; then - ac_cv_have_stddef_h=1 - -else - ac_cv_have_stddef_h=0 - fi - -SNAPPY_MAJOR="1" -SNAPPY_MINOR="0" -SNAPPY_PATCHLEVEL="3" - - - - - -{ echo "$as_me:$LINENO: checking for pthread_create in -lpthread" >&5 -echo $ECHO_N "checking for pthread_create in -lpthread... $ECHO_C" >&6; } - -original_LIBS="$LIBS" -LIBS="-lpthread $original_LIBS" - -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + fi + if test $ac_cv_c_bigendian = unknown; then + # Compile a test program. 
+ if test "$cross_compiling" = yes; then : + # Try to guess by grepping values from an object file. + cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ -#include +short int ascii_mm[] = + { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 }; + short int ascii_ii[] = + { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 }; + int use_ascii (int i) { + return ascii_mm[i] + ascii_ii[i]; + } + short int ebcdic_ii[] = + { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 }; + short int ebcdic_mm[] = + { 0xC2C9, 0xC785, 0x95C4, 0x8981, 0x95E2, 0xA8E2, 0 }; + int use_ebcdic (int i) { + return ebcdic_mm[i] + ebcdic_ii[i]; + } + extern int foo; + int main () { -pthread_create((void *)0, (void *)0, (void *)0, (void *)0) +return use_ascii (foo) == use_ebcdic (foo); ; return 0; } _ACEOF -rm -f conftest.$ac_objext conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! -s conftest.err - } && test -s conftest$ac_exeext && - $as_test_x conftest$ac_exeext; then - pthread=yes -else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - pthread=no +if ac_fn_cxx_try_compile "$LINENO"; then : + if grep BIGenDianSyS conftest.$ac_objext >/dev/null; then + ac_cv_c_bigendian=yes + fi + if grep LiTTleEnDian conftest.$ac_objext >/dev/null ; then + if test "$ac_cv_c_bigendian" = unknown; then + ac_cv_c_bigendian=no + else + # finding both strings is unlikely to happen, but who knows? 
+ ac_cv_c_bigendian=unknown + fi + fi fi - -rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ - conftest$ac_exeext conftest.$ac_ext - -if test x${pthread} = xyes; then - { echo "$as_me:$LINENO: result: yes" >&5 -echo "${ECHO_T}yes" >&6; } +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext else - LIBS="$original_LIBS" - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } -fi - - - -case $(uname -s) in - CYGWIN*) ;; - *) - CPPFLAGS="$CPPFLAGS -I/opt/local/include" - CPPFLAGS="$CPPFLAGS -I/usr/local/include" - CPPFLAGS="$CPPFLAGS -I/usr/include" - LDFLAGS="$LDFLAGS -L/opt/local/lib" - LDFLAGS="$LDFLAGS -L/usr/local/lib" - ;; -esac + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ +$ac_includes_default +int +main () +{ -CPPFLAGS="$CPPFLAGS $FLAGS" -LDFLAGS="$LDFLAGS $FLAGS" + /* Are we little or big endian? From Harbison&Steele. */ + union + { + long int l; + char c[sizeof (long int)]; + } u; + u.l = 1; + return u.c[sizeof (long int) - 1] == 1; -# Extract the first word of "erl", so it can be a program name with args. -set dummy erl; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_path_ERL+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_run "$LINENO"; then : + ac_cv_c_bigendian=no else - case $ERL in - [\\/]* | ?:[\\/]*) - ac_cv_path_ERL="$ERL" # Let the user override the test with a path. - ;; - *) - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_path_ERL="$as_dir/$ac_word$ac_exec_ext" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done -done -IFS=$as_save_IFS - - ;; -esac + ac_cv_c_bigendian=yes fi -ERL=$ac_cv_path_ERL -if test -n "$ERL"; then - { echo "$as_me:$LINENO: result: $ERL" >&5 -echo "${ECHO_T}$ERL" >&6; } -else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } +rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ + conftest.$ac_objext conftest.beam conftest.$ac_ext fi - -if test x${ERL} = x; then - - { { echo "$as_me:$LINENO: error: Could not find the \`erl' executable. Is Erlang installed?" >&5 -echo "$as_me: error: Could not find the \`erl' executable. Is Erlang installed?" >&2;} - { (exit 1); exit 1; }; } - + fi fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_bigendian" >&5 +$as_echo "$ac_cv_c_bigendian" >&6; } + case $ac_cv_c_bigendian in #( + yes) + $as_echo "#define WORDS_BIGENDIAN 1" >>confdefs.h +;; #( + no) + ;; #( + universal) +$as_echo "#define AC_APPLE_UNIVERSAL_BUILD 1" >>confdefs.h + ;; #( + *) + as_fn_error $? "unknown endianness + presetting ac_cv_c_bigendian=no (or yes) will help" "$LINENO" 5 ;; + esac -# Check whether --with-erlang was given. -if test "${with_erlang+set}" = set; then - withval=$with_erlang; - ERLANG_FLAGS="-I$withval" - -else - - realerl=`readlink -f $ERL 2>/dev/null` - if test $? -eq 0; then - - erlbase=`dirname $realerl` - erlbase=`dirname $erlbase` - ERLANG_FLAGS="-I${erlbase}/usr/include" - -else - - # Failed to figure out where erl is installed.. 
- # try to add some default directories to search - ERLANG_FLAGS="-I${libdir}/erlang/usr/include" - ERLANG_FLAGS="$ERLANG_FLAGS -I/usr/lib/erlang/usr/include" - ERLANG_FLAGS="$ERLANG_FLAGS -I/usr/local/lib/erlang/usr/include" - ERLANG_FLAGS="$ERLANG_FLAGS -I/opt/local/lib/erlang/usr/include" - -fi +for ac_header in stdint.h stddef.h sys/mman.h sys/resource.h +do : + as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` +ac_fn_cxx_check_header_mongrel "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default" +if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : + cat >>confdefs.h <<_ACEOF +#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 +_ACEOF fi +done +ac_fn_cxx_check_func "$LINENO" "mmap" "ac_cv_func_mmap" +if test "x$ac_cv_func_mmap" = xyes; then : -if test -n "$PKG_CONFIG" && \ - { (echo "$as_me:$LINENO: \$PKG_CONFIG --exists --print-errors \"mozjs185\"") >&5 - ($PKG_CONFIG --exists --print-errors "mozjs185") 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); }; then - - if test -n "$PKG_CONFIG" && \ - { (echo "$as_me:$LINENO: \$PKG_CONFIG --exists --print-errors \"nspr\"") >&5 - ($PKG_CONFIG --exists --print-errors "nspr") 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); }; then - : -else - - { echo "$as_me:$LINENO: WARNING: - -You have the pkg-config file for mozjs185 isntalled but no pkg-config -file for NSPR. More than likely configure will fail. If it does it most -likely means you need to find on install the pkg-config file for NSPR. -This most commonly occurs on Mac OS X with older versions of Homebrew. +fi -You can correct this by removing SpiderMonkey and NSPR, updating -Homebrew and reinstalling. -" >&5 -echo "$as_me: WARNING: +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if the compiler supports __builtin_expect" >&5 +$as_echo_n "checking if the compiler supports __builtin_expect... 
" >&6; } -You have the pkg-config file for mozjs185 isntalled but no pkg-config -file for NSPR. More than likely configure will fail. If it does it most -likely means you need to find on install the pkg-config file for NSPR. -This most commonly occurs on Mac OS X with older versions of Homebrew. +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ -You can correct this by removing SpiderMonkey and NSPR, updating -Homebrew and reinstalling. +int +main () +{ -" >&2;} -fi -fi + return __builtin_expect(1, 1) ? 1 : 0 + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_compile "$LINENO"; then : -# Check whether --with-js-lib was given. -if test "${with_js_lib+set}" = set; then - withval=$with_js_lib; - JS_LIB_DIR=$withval + snappy_have_builtin_expect=yes + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } else + snappy_have_builtin_expect=no + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } -pkg_failed=no -{ echo "$as_me:$LINENO: checking for JS185" >&5 -echo $ECHO_N "checking for JS185... $ECHO_C" >&6; } - -if test -n "$JS185_CFLAGS"; then - pkg_cv_JS185_CFLAGS="$JS185_CFLAGS" - elif test -n "$PKG_CONFIG"; then - if test -n "$PKG_CONFIG" && \ - { (echo "$as_me:$LINENO: \$PKG_CONFIG --exists --print-errors \"mozjs185\"") >&5 - ($PKG_CONFIG --exists --print-errors "mozjs185") 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); }; then - pkg_cv_JS185_CFLAGS=`$PKG_CONFIG --cflags "mozjs185" 2>/dev/null` -else - pkg_failed=yes -fi - else - pkg_failed=untried -fi -if test -n "$JS185_LIBS"; then - pkg_cv_JS185_LIBS="$JS185_LIBS" - elif test -n "$PKG_CONFIG"; then - if test -n "$PKG_CONFIG" && \ - { (echo "$as_me:$LINENO: \$PKG_CONFIG --exists --print-errors \"mozjs185\"") >&5 - ($PKG_CONFIG --exists --print-errors "mozjs185") 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 - (exit $ac_status); }; then - pkg_cv_JS185_LIBS=`$PKG_CONFIG --libs "mozjs185" 2>/dev/null` -else - pkg_failed=yes -fi - else - pkg_failed=untried fi +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +if test x$snappy_have_builtin_expect = xyes ; then +$as_echo "#define HAVE_BUILTIN_EXPECT 1" >>confdefs.h -if test $pkg_failed = yes; then - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } - -if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then - _pkg_short_errors_supported=yes -else - _pkg_short_errors_supported=no fi - if test $_pkg_short_errors_supported = yes; then - JS185_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors "mozjs185" 2>&1` - else - JS185_PKG_ERRORS=`$PKG_CONFIG --print-errors "mozjs185" 2>&1` - fi - # Put the nasty error message in config.log where it belongs - echo "$JS185_PKG_ERRORS" >&5 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if the compiler supports __builtin_ctzll" >&5 +$as_echo_n "checking if the compiler supports __builtin_ctzll... " >&6; } +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. */ -pkg_failed=no -{ echo "$as_me:$LINENO: checking for JS" >&5 -echo $ECHO_N "checking for JS... $ECHO_C" >&6; } - -if test -n "$JS_CFLAGS"; then - pkg_cv_JS_CFLAGS="$JS_CFLAGS" - elif test -n "$PKG_CONFIG"; then - if test -n "$PKG_CONFIG" && \ - { (echo "$as_me:$LINENO: \$PKG_CONFIG --exists --print-errors \"mozilla-js >= 1.7\"") >&5 - ($PKG_CONFIG --exists --print-errors "mozilla-js >= 1.7") 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 - (exit $ac_status); }; then - pkg_cv_JS_CFLAGS=`$PKG_CONFIG --cflags "mozilla-js >= 1.7" 2>/dev/null` -else - pkg_failed=yes -fi - else - pkg_failed=untried -fi -if test -n "$JS_LIBS"; then - pkg_cv_JS_LIBS="$JS_LIBS" - elif test -n "$PKG_CONFIG"; then - if test -n "$PKG_CONFIG" && \ - { (echo "$as_me:$LINENO: \$PKG_CONFIG --exists --print-errors \"mozilla-js >= 1.7\"") >&5 - ($PKG_CONFIG --exists --print-errors "mozilla-js >= 1.7") 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); }; then - pkg_cv_JS_LIBS=`$PKG_CONFIG --libs "mozilla-js >= 1.7" 2>/dev/null` -else - pkg_failed=yes -fi - else - pkg_failed=untried -fi +int +main () +{ + return (__builtin_ctzll(0x100000000LL) == 32) ? 1 : 0 + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_compile "$LINENO"; then : -if test $pkg_failed = yes; then - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + snappy_have_builtin_ctz=yes + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } -if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then - _pkg_short_errors_supported=yes else - _pkg_short_errors_supported=no + + snappy_have_builtin_ctz=no + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + fi - if test $_pkg_short_errors_supported = yes; then - JS_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors "mozilla-js >= 1.7" 2>&1` - else - JS_PKG_ERRORS=`$PKG_CONFIG --print-errors "mozilla-js >= 1.7" 2>&1` - fi - # Put the nasty error message in config.log where it belongs - echo "$JS_PKG_ERRORS" >&5 +rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext +if test x$snappy_have_builtin_ctz = xyes ; then - JS_LIB_DIR="${libdir}" +$as_echo "#define HAVE_BUILTIN_CTZ 1" >>confdefs.h -elif test $pkg_failed = untried; then - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } +fi - JS_LIB_DIR="${libdir}" +if test "$ac_cv_header_stdint_h" = "yes"; then + ac_cv_have_stdint_h=1 else - 
JS_CFLAGS=$pkg_cv_JS_CFLAGS - JS_LIBS=$pkg_cv_JS_LIBS - { echo "$as_me:$LINENO: result: yes" >&5 -echo "${ECHO_T}yes" >&6; } + ac_cv_have_stdint_h=0 + +fi +if test "$ac_cv_header_stddef_h" = "yes"; then + ac_cv_have_stddef_h=1 - JS_LIB_DIR="$(${PKG_CONFIG} --variable=sdkdir mozilla-js)/lib" +else + ac_cv_have_stddef_h=0 fi -elif test $pkg_failed = untried; then - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } +SNAPPY_MAJOR="1" +SNAPPY_MINOR="0" +SNAPPY_PATCHLEVEL="3" -pkg_failed=no -{ echo "$as_me:$LINENO: checking for JS" >&5 -echo $ECHO_N "checking for JS... $ECHO_C" >&6; } -if test -n "$JS_CFLAGS"; then - pkg_cv_JS_CFLAGS="$JS_CFLAGS" - elif test -n "$PKG_CONFIG"; then - if test -n "$PKG_CONFIG" && \ - { (echo "$as_me:$LINENO: \$PKG_CONFIG --exists --print-errors \"mozilla-js >= 1.7\"") >&5 - ($PKG_CONFIG --exists --print-errors "mozilla-js >= 1.7") 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); }; then - pkg_cv_JS_CFLAGS=`$PKG_CONFIG --cflags "mozilla-js >= 1.7" 2>/dev/null` + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for pthread_create in -lpthread" >&5 +$as_echo_n "checking for pthread_create in -lpthread... " >&6; } + +original_LIBS="$LIBS" +LIBS="-lpthread $original_LIBS" + +cat confdefs.h - <<_ACEOF >conftest.$ac_ext +/* end confdefs.h. 
*/ + + #include +int +main () +{ +pthread_create((void *)0, (void *)0, (void *)0, (void *)0) + ; + return 0; +} +_ACEOF +if ac_fn_cxx_try_link "$LINENO"; then : + pthread=yes else - pkg_failed=yes + pthread=no fi - else - pkg_failed=untried +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext + +if test x${pthread} = xyes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } +else + LIBS="$original_LIBS" + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi -if test -n "$JS_LIBS"; then - pkg_cv_JS_LIBS="$JS_LIBS" - elif test -n "$PKG_CONFIG"; then - if test -n "$PKG_CONFIG" && \ - { (echo "$as_me:$LINENO: \$PKG_CONFIG --exists --print-errors \"mozilla-js >= 1.7\"") >&5 - ($PKG_CONFIG --exists --print-errors "mozilla-js >= 1.7") 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); }; then - pkg_cv_JS_LIBS=`$PKG_CONFIG --libs "mozilla-js >= 1.7" 2>/dev/null` + +# Extract the first word of "erl", so it can be a program name with args. +set dummy erl; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_path_ERL+:} false; then : + $as_echo_n "(cached) " >&6 else - pkg_failed=yes + case $ERL in + [\\/]* | ?:[\\/]*) + ac_cv_path_ERL="$ERL" # Let the user override the test with a path. + ;; + *) + as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_path_ERL="$as_dir/$ac_word$ac_exec_ext" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + + ;; +esac fi - else - pkg_failed=untried +ERL=$ac_cv_path_ERL +if test -n "$ERL"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ERL" >&5 +$as_echo "$ERL" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi -if test $pkg_failed = yes; then - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } - -if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then - _pkg_short_errors_supported=yes -else - _pkg_short_errors_supported=no -fi - if test $_pkg_short_errors_supported = yes; then - JS_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors "mozilla-js >= 1.7" 2>&1` - else - JS_PKG_ERRORS=`$PKG_CONFIG --print-errors "mozilla-js >= 1.7" 2>&1` - fi - # Put the nasty error message in config.log where it belongs - echo "$JS_PKG_ERRORS" >&5 +if test x${ERL} = x; then : + as_fn_error $? "Could not find the \`erl' executable. Is Erlang installed?" "$LINENO" 5 - JS_LIB_DIR="${libdir}" +fi -elif test $pkg_failed = untried; then - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } - JS_LIB_DIR="${libdir}" +# Check whether --with-erlang was given. +if test "${with_erlang+set}" = set; then : + withval=$with_erlang; + ERLANG_FLAGS="-I$withval" else - JS_CFLAGS=$pkg_cv_JS_CFLAGS - JS_LIBS=$pkg_cv_JS_LIBS - { echo "$as_me:$LINENO: result: yes" >&5 -echo "${ECHO_T}yes" >&6; } - JS_LIB_DIR="$(${PKG_CONFIG} --variable=sdkdir mozilla-js)/lib" + realerl=`readlink -f $ERL 2>/dev/null` + if test $? 
-eq 0; then : -fi + erlbase=`dirname $realerl` + erlbase=`dirname $erlbase` + ERLANG_FLAGS="-I${erlbase}/usr/include" else - JS185_CFLAGS=$pkg_cv_JS185_CFLAGS - JS185_LIBS=$pkg_cv_JS185_LIBS - { echo "$as_me:$LINENO: result: yes" >&5 -echo "${ECHO_T}yes" >&6; } - JS_LIB_DIR="$(${PKG_CONFIG} --variable=libdir mozjs185)" + # Failed to figure out where erl is installed.. + # try to add some default directories to search + ERLANG_FLAGS="-I${libdir}/erlang/usr/include" + ERLANG_FLAGS="$ERLANG_FLAGS -I/usr/lib/erlang/usr/include" + ERLANG_FLAGS="$ERLANG_FLAGS -I/usr/local/lib/erlang/usr/include" + ERLANG_FLAGS="$ERLANG_FLAGS -I/opt/local/lib/erlang/usr/include" fi fi -JS_LDFLAGS="-L$JS_LIB_DIR $LDFLAGS" -# Check whether --with-js-include was given. -if test "${with_js_include+set}" = set; then - withval=$with_js_include; - JS_INCLUDE="$withval" - JS_CFLAGS="-I$JS_INCLUDE" - JS_CFLAGS="$JS_CFLAGS -I$JS_INCLUDE/js" - JS_CFLAGS="$JS_CFLAGS -I$JS_INCLUDE/mozjs" - -else - pkg_failed=no -{ echo "$as_me:$LINENO: checking for JS185" >&5 -echo $ECHO_N "checking for JS185... $ECHO_C" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for JS" >&5 +$as_echo_n "checking for JS... " >&6; } -if test -n "$JS185_CFLAGS"; then - pkg_cv_JS185_CFLAGS="$JS185_CFLAGS" +if test -n "$JS_CFLAGS"; then + pkg_cv_JS_CFLAGS="$JS_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ - { (echo "$as_me:$LINENO: \$PKG_CONFIG --exists --print-errors \"mozjs185\"") >&5 + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"mozjs185\""; } >&5 ($PKG_CONFIG --exists --print-errors "mozjs185") 2>&5 ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); }; then - pkg_cv_JS185_CFLAGS=`$PKG_CONFIG --cflags "mozjs185" 2>/dev/null` + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_JS_CFLAGS=`$PKG_CONFIG --cflags "mozjs185" 2>/dev/null` + test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi -if test -n "$JS185_LIBS"; then - pkg_cv_JS185_LIBS="$JS185_LIBS" +if test -n "$JS_LIBS"; then + pkg_cv_JS_LIBS="$JS_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ - { (echo "$as_me:$LINENO: \$PKG_CONFIG --exists --print-errors \"mozjs185\"") >&5 + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"mozjs185\""; } >&5 ($PKG_CONFIG --exists --print-errors "mozjs185") 2>&5 ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); }; then - pkg_cv_JS185_LIBS=`$PKG_CONFIG --libs "mozjs185" 2>/dev/null` + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_JS_LIBS=`$PKG_CONFIG --libs "mozjs185" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi @@ -16430,8 +17070,8 @@ if test $pkg_failed = yes; then - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes @@ -16439,29 +17079,30 @@ _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - JS185_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors "mozjs185" 2>&1` + JS_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "mozjs185" 2>&1` else - JS185_PKG_ERRORS=`$PKG_CONFIG --print-errors "mozjs185" 2>&1` + JS_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "mozjs185" 2>&1` fi # Put the nasty error message in config.log where it belongs - echo "$JS185_PKG_ERRORS" >&5 + echo "$JS_PKG_ERRORS" >&5 pkg_failed=no -{ echo "$as_me:$LINENO: checking for JS" >&5 -echo $ECHO_N "checking for JS... $ECHO_C" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for JS" >&5 +$as_echo_n "checking for JS... 
" >&6; } if test -n "$JS_CFLAGS"; then pkg_cv_JS_CFLAGS="$JS_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ - { (echo "$as_me:$LINENO: \$PKG_CONFIG --exists --print-errors \"mozilla-js >= 1.7\"") >&5 + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"mozilla-js >= 1.7\""; } >&5 ($PKG_CONFIG --exists --print-errors "mozilla-js >= 1.7") 2>&5 ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); }; then + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then pkg_cv_JS_CFLAGS=`$PKG_CONFIG --cflags "mozilla-js >= 1.7" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi @@ -16472,12 +17113,13 @@ pkg_cv_JS_LIBS="$JS_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ - { (echo "$as_me:$LINENO: \$PKG_CONFIG --exists --print-errors \"mozilla-js >= 1.7\"") >&5 + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"mozilla-js >= 1.7\""; } >&5 ($PKG_CONFIG --exists --print-errors "mozilla-js >= 1.7") 2>&5 ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); }; then + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then pkg_cv_JS_LIBS=`$PKG_CONFIG --libs "mozilla-js >= 1.7" 2>/dev/null` + test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi @@ -16488,8 +17130,8 @@ if test $pkg_failed = yes; then - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes @@ -16497,59 +17139,62 @@ _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - JS_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors "mozilla-js >= 1.7" 2>&1` + JS_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "mozilla-js >= 1.7" 2>&1` else - JS_PKG_ERRORS=`$PKG_CONFIG --print-errors "mozilla-js >= 1.7" 2>&1` + JS_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "mozilla-js >= 1.7" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$JS_PKG_ERRORS" >&5 - JS_CFLAGS="-I/usr/include" - JS_CFLAGS="$JS_CFLAGS -I/usr/include/js" - JS_CFLAGS="$JS_CFLAGS -I/usr/include/mozjs" - JS_CFLAGS="$JS_CFLAGS -I/usr/local/include/js" - JS_CFLAGS="$JS_CFLAGS -I/opt/local/include/js" + JS_LIB_DIR="${libdir}" + JS_CFLAGS="-I/usr/include" + JS_CFLAGS="$JS_CFLAGS -I/usr/include/js" + JS_CFLAGS="$JS_CFLAGS -I/usr/include/mozjs" + JS_CFLAGS="$JS_CFLAGS -I/usr/local/include/js" + JS_CFLAGS="$JS_CFLAGS -I/opt/local/include/js" elif test $pkg_failed = untried; then - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } - JS_CFLAGS="-I/usr/include" - JS_CFLAGS="$JS_CFLAGS -I/usr/include/js" - JS_CFLAGS="$JS_CFLAGS -I/usr/include/mozjs" - JS_CFLAGS="$JS_CFLAGS -I/usr/local/include/js" - JS_CFLAGS="$JS_CFLAGS -I/opt/local/include/js" + JS_LIB_DIR="${libdir}" + JS_CFLAGS="-I/usr/include" + JS_CFLAGS="$JS_CFLAGS -I/usr/include/js" + JS_CFLAGS="$JS_CFLAGS -I/usr/include/mozjs" + JS_CFLAGS="$JS_CFLAGS -I/usr/local/include/js" + JS_CFLAGS="$JS_CFLAGS -I/opt/local/include/js" else 
JS_CFLAGS=$pkg_cv_JS_CFLAGS JS_LIBS=$pkg_cv_JS_LIBS - { echo "$as_me:$LINENO: result: yes" >&5 -echo "${ECHO_T}yes" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } - JS_CFLAGS="$(${PKG_CONFIG} --cflags mozilla-js)" + JS_LIB_DIR="$(${PKG_CONFIG} --variable=sdkdir mozilla-js)/lib" fi elif test $pkg_failed = untried; then - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } pkg_failed=no -{ echo "$as_me:$LINENO: checking for JS" >&5 -echo $ECHO_N "checking for JS... $ECHO_C" >&6; } +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for JS" >&5 +$as_echo_n "checking for JS... " >&6; } if test -n "$JS_CFLAGS"; then pkg_cv_JS_CFLAGS="$JS_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ - { (echo "$as_me:$LINENO: \$PKG_CONFIG --exists --print-errors \"mozilla-js >= 1.7\"") >&5 + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"mozilla-js >= 1.7\""; } >&5 ($PKG_CONFIG --exists --print-errors "mozilla-js >= 1.7") 2>&5 ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); }; then + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then pkg_cv_JS_CFLAGS=`$PKG_CONFIG --cflags "mozilla-js >= 1.7" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi @@ -16559,509 +17204,201 @@ if test -n "$JS_LIBS"; then pkg_cv_JS_LIBS="$JS_LIBS" elif test -n "$PKG_CONFIG"; then - if test -n "$PKG_CONFIG" && \ - { (echo "$as_me:$LINENO: \$PKG_CONFIG --exists --print-errors \"mozilla-js >= 1.7\"") >&5 - ($PKG_CONFIG --exists --print-errors "mozilla-js >= 1.7") 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 - (exit $ac_status); }; then - pkg_cv_JS_LIBS=`$PKG_CONFIG --libs "mozilla-js >= 1.7" 2>/dev/null` -else - pkg_failed=yes -fi - else - pkg_failed=untried -fi - - - -if test $pkg_failed = yes; then - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } - -if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then - _pkg_short_errors_supported=yes -else - _pkg_short_errors_supported=no -fi - if test $_pkg_short_errors_supported = yes; then - JS_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors "mozilla-js >= 1.7" 2>&1` - else - JS_PKG_ERRORS=`$PKG_CONFIG --print-errors "mozilla-js >= 1.7" 2>&1` - fi - # Put the nasty error message in config.log where it belongs - echo "$JS_PKG_ERRORS" >&5 - - - JS_CFLAGS="-I/usr/include" - JS_CFLAGS="$JS_CFLAGS -I/usr/include/js" - JS_CFLAGS="$JS_CFLAGS -I/usr/include/mozjs" - JS_CFLAGS="$JS_CFLAGS -I/usr/local/include/js" - JS_CFLAGS="$JS_CFLAGS -I/opt/local/include/js" - -elif test $pkg_failed = untried; then - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } - - JS_CFLAGS="-I/usr/include" - JS_CFLAGS="$JS_CFLAGS -I/usr/include/js" - JS_CFLAGS="$JS_CFLAGS -I/usr/include/mozjs" - JS_CFLAGS="$JS_CFLAGS -I/usr/local/include/js" - JS_CFLAGS="$JS_CFLAGS -I/opt/local/include/js" - -else - JS_CFLAGS=$pkg_cv_JS_CFLAGS - JS_LIBS=$pkg_cv_JS_LIBS - { echo "$as_me:$LINENO: result: yes" >&5 -echo "${ECHO_T}yes" >&6; } - - JS_CFLAGS="$(${PKG_CONFIG} --cflags mozilla-js)" - -fi - -else - JS185_CFLAGS=$pkg_cv_JS185_CFLAGS - JS185_LIBS=$pkg_cv_JS185_LIBS - { echo "$as_me:$LINENO: result: yes" >&5 -echo "${ECHO_T}yes" >&6; } - - JS_CFLAGS="$(${PKG_CONFIG} --cflags mozjs185)" - -fi - -fi - - -use_js_trunk=no -# Check whether --enable-js-trunk was given. -if test "${enable_js_trunk+set}" = set; then - enableval=$enable_js_trunk; - use_js_trunk=$enableval - -fi - - -# The erlang cc.sh/ld.sh scripts will convert a -O option -# into the same optimization flags erlang itself uses. 
-CFLAGS="-O2 $CFLAGS" - -LIBS="$LIBS $LDFLAGS" - -case $(uname -s) in - CYGWIN*) - JS_CFLAGS="-DXP_WIN $JS_CFLAGS" - IS_WINDOWS="TRUE" - ;; - *) - # XP_UNIX required for jsapi.h and has been tested to work on Linux and Darwin. - JS_CFLAGS="-DXP_UNIX $JS_CFLAGS" - ;; -esac - - - if test x$IS_WINDOWS = xTRUE; then - WINDOWS_TRUE= - WINDOWS_FALSE='#' -else - WINDOWS_TRUE='#' - WINDOWS_FALSE= -fi - - -OLD_CPPFLAGS="$CPPFLAGS" -OLD_LDFLAGS="$LDFLAGS" -OLD_LIBS="$LIBS" -CPPFLAGS="$JS_CFLAGS $CPPFLAGS" -LDFLAGS="$JS_LDFLAGS" -LIBS="$JS_LIBS $LIBS" - -if test "${ac_cv_header_jsapi_h+set}" = set; then - { echo "$as_me:$LINENO: checking for jsapi.h" >&5 -echo $ECHO_N "checking for jsapi.h... $ECHO_C" >&6; } -if test "${ac_cv_header_jsapi_h+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 -fi -{ echo "$as_me:$LINENO: result: $ac_cv_header_jsapi_h" >&5 -echo "${ECHO_T}$ac_cv_header_jsapi_h" >&6; } -else - # Is the header compilable? -{ echo "$as_me:$LINENO: checking jsapi.h usability" >&5 -echo $ECHO_N "checking jsapi.h usability... $ECHO_C" >&6; } -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF -/* end confdefs.h. */ -$ac_includes_default -#include -_ACEOF -rm -f conftest.$ac_objext -if { (ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compile") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! 
-s conftest.err - } && test -s conftest.$ac_objext; then - ac_header_compiler=yes -else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_header_compiler=no -fi - -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -{ echo "$as_me:$LINENO: result: $ac_header_compiler" >&5 -echo "${ECHO_T}$ac_header_compiler" >&6; } - -# Is the header present? -{ echo "$as_me:$LINENO: checking jsapi.h presence" >&5 -echo $ECHO_N "checking jsapi.h presence... $ECHO_C" >&6; } -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF -/* end confdefs.h. */ -#include -_ACEOF -if { (ac_try="$ac_cpp conftest.$ac_ext" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_cpp conftest.$ac_ext") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } >/dev/null && { - test -z "$ac_cxx_preproc_warn_flag$ac_cxx_werror_flag" || - test ! -s conftest.err - }; then - ac_header_preproc=yes + if test -n "$PKG_CONFIG" && \ + { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"mozilla-js >= 1.7\""; } >&5 + ($PKG_CONFIG --exists --print-errors "mozilla-js >= 1.7") 2>&5 + ac_status=$? + $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 + test $ac_status = 0; }; then + pkg_cv_JS_LIBS=`$PKG_CONFIG --libs "mozilla-js >= 1.7" 2>/dev/null` + test "x$?" != "x0" && pkg_failed=yes else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_header_preproc=no + pkg_failed=yes +fi + else + pkg_failed=untried fi -rm -f conftest.err conftest.$ac_ext -{ echo "$as_me:$LINENO: result: $ac_header_preproc" >&5 -echo "${ECHO_T}$ac_header_preproc" >&6; } -# So? What about this header? 
-case $ac_header_compiler:$ac_header_preproc:$ac_cxx_preproc_warn_flag in - yes:no: ) - { echo "$as_me:$LINENO: WARNING: jsapi.h: accepted by the compiler, rejected by the preprocessor!" >&5 -echo "$as_me: WARNING: jsapi.h: accepted by the compiler, rejected by the preprocessor!" >&2;} - { echo "$as_me:$LINENO: WARNING: jsapi.h: proceeding with the compiler's result" >&5 -echo "$as_me: WARNING: jsapi.h: proceeding with the compiler's result" >&2;} - ac_header_preproc=yes - ;; - no:yes:* ) - { echo "$as_me:$LINENO: WARNING: jsapi.h: present but cannot be compiled" >&5 -echo "$as_me: WARNING: jsapi.h: present but cannot be compiled" >&2;} - { echo "$as_me:$LINENO: WARNING: jsapi.h: check for missing prerequisite headers?" >&5 -echo "$as_me: WARNING: jsapi.h: check for missing prerequisite headers?" >&2;} - { echo "$as_me:$LINENO: WARNING: jsapi.h: see the Autoconf documentation" >&5 -echo "$as_me: WARNING: jsapi.h: see the Autoconf documentation" >&2;} - { echo "$as_me:$LINENO: WARNING: jsapi.h: section \"Present But Cannot Be Compiled\"" >&5 -echo "$as_me: WARNING: jsapi.h: section \"Present But Cannot Be Compiled\"" >&2;} - { echo "$as_me:$LINENO: WARNING: jsapi.h: proceeding with the preprocessor's result" >&5 -echo "$as_me: WARNING: jsapi.h: proceeding with the preprocessor's result" >&2;} - { echo "$as_me:$LINENO: WARNING: jsapi.h: in the future, the compiler will take precedence" >&5 -echo "$as_me: WARNING: jsapi.h: in the future, the compiler will take precedence" >&2;} - ( cat <<\_ASBOX -## ------------------------------------------------------------ ## -## Report this to https://issues.apache.org/jira/browse/COUCHDB ## -## ------------------------------------------------------------ ## -_ASBOX - ) | sed "s/^/$as_me: WARNING: /" >&2 - ;; -esac -{ echo "$as_me:$LINENO: checking for jsapi.h" >&5 -echo $ECHO_N "checking for jsapi.h... 
$ECHO_C" >&6; } -if test "${ac_cv_header_jsapi_h+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + +if test $pkg_failed = yes; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + +if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then + _pkg_short_errors_supported=yes else - ac_cv_header_jsapi_h=$ac_header_preproc + _pkg_short_errors_supported=no fi -{ echo "$as_me:$LINENO: result: $ac_cv_header_jsapi_h" >&5 -echo "${ECHO_T}$ac_cv_header_jsapi_h" >&6; } + if test $_pkg_short_errors_supported = yes; then + JS_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "mozilla-js >= 1.7" 2>&1` + else + JS_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "mozilla-js >= 1.7" 2>&1` + fi + # Put the nasty error message in config.log where it belongs + echo "$JS_PKG_ERRORS" >&5 + + + JS_LIB_DIR="${libdir}" + JS_CFLAGS="-I/usr/include" + JS_CFLAGS="$JS_CFLAGS -I/usr/include/js" + JS_CFLAGS="$JS_CFLAGS -I/usr/include/mozjs" + JS_CFLAGS="$JS_CFLAGS -I/usr/local/include/js" + JS_CFLAGS="$JS_CFLAGS -I/opt/local/include/js" + +elif test $pkg_failed = untried; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + + JS_LIB_DIR="${libdir}" + JS_CFLAGS="-I/usr/include" + JS_CFLAGS="$JS_CFLAGS -I/usr/include/js" + JS_CFLAGS="$JS_CFLAGS -I/usr/include/mozjs" + JS_CFLAGS="$JS_CFLAGS -I/usr/local/include/js" + JS_CFLAGS="$JS_CFLAGS -I/opt/local/include/js" -fi -if test $ac_cv_header_jsapi_h = yes; then - : else + JS_CFLAGS=$pkg_cv_JS_CFLAGS + JS_LIBS=$pkg_cv_JS_LIBS + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + + JS_LIB_DIR="$(${PKG_CONFIG} --variable=sdkdir mozilla-js)/lib" - if test "${ac_cv_header_js_jsapi_h+set}" = set; then - { echo "$as_me:$LINENO: checking for js/jsapi.h" >&5 -echo $ECHO_N "checking for js/jsapi.h... 
$ECHO_C" >&6; } -if test "${ac_cv_header_js_jsapi_h+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 fi -{ echo "$as_me:$LINENO: result: $ac_cv_header_js_jsapi_h" >&5 -echo "${ECHO_T}$ac_cv_header_js_jsapi_h" >&6; } -else - # Is the header compilable? -{ echo "$as_me:$LINENO: checking js/jsapi.h usability" >&5 -echo $ECHO_N "checking js/jsapi.h usability... $ECHO_C" >&6; } -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF -/* end confdefs.h. */ -$ac_includes_default -#include -_ACEOF -rm -f conftest.$ac_objext -if { (ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compile") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! -s conftest.err - } && test -s conftest.$ac_objext; then - ac_header_compiler=yes + else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 + JS_CFLAGS=$pkg_cv_JS_CFLAGS + JS_LIBS=$pkg_cv_JS_LIBS + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + + JS_LIB_DIR="$(${PKG_CONFIG} --variable=libdir mozjs185)" - ac_header_compiler=no fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -{ echo "$as_me:$LINENO: result: $ac_header_compiler" >&5 -echo "${ECHO_T}$ac_header_compiler" >&6; } -# Is the header present? -{ echo "$as_me:$LINENO: checking js/jsapi.h presence" >&5 -echo $ECHO_N "checking js/jsapi.h presence... $ECHO_C" >&6; } -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF -/* end confdefs.h. 
*/ -#include -_ACEOF -if { (ac_try="$ac_cpp conftest.$ac_ext" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_cpp conftest.$ac_ext") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } >/dev/null && { - test -z "$ac_cxx_preproc_warn_flag$ac_cxx_werror_flag" || - test ! -s conftest.err - }; then - ac_header_preproc=yes -else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 +# Check whether --with-js-include was given. +if test "${with_js_include+set}" = set; then : + withval=$with_js_include; + JS_INCLUDE="$withval" + JS_CFLAGS="-I$JS_INCLUDE" - ac_header_preproc=no fi -rm -f conftest.err conftest.$ac_ext -{ echo "$as_me:$LINENO: result: $ac_header_preproc" >&5 -echo "${ECHO_T}$ac_header_preproc" >&6; } -# So? What about this header? -case $ac_header_compiler:$ac_header_preproc:$ac_cxx_preproc_warn_flag in - yes:no: ) - { echo "$as_me:$LINENO: WARNING: js/jsapi.h: accepted by the compiler, rejected by the preprocessor!" >&5 -echo "$as_me: WARNING: js/jsapi.h: accepted by the compiler, rejected by the preprocessor!" >&2;} - { echo "$as_me:$LINENO: WARNING: js/jsapi.h: proceeding with the compiler's result" >&5 -echo "$as_me: WARNING: js/jsapi.h: proceeding with the compiler's result" >&2;} - ac_header_preproc=yes - ;; - no:yes:* ) - { echo "$as_me:$LINENO: WARNING: js/jsapi.h: present but cannot be compiled" >&5 -echo "$as_me: WARNING: js/jsapi.h: present but cannot be compiled" >&2;} - { echo "$as_me:$LINENO: WARNING: js/jsapi.h: check for missing prerequisite headers?" >&5 -echo "$as_me: WARNING: js/jsapi.h: check for missing prerequisite headers?" 
>&2;} - { echo "$as_me:$LINENO: WARNING: js/jsapi.h: see the Autoconf documentation" >&5 -echo "$as_me: WARNING: js/jsapi.h: see the Autoconf documentation" >&2;} - { echo "$as_me:$LINENO: WARNING: js/jsapi.h: section \"Present But Cannot Be Compiled\"" >&5 -echo "$as_me: WARNING: js/jsapi.h: section \"Present But Cannot Be Compiled\"" >&2;} - { echo "$as_me:$LINENO: WARNING: js/jsapi.h: proceeding with the preprocessor's result" >&5 -echo "$as_me: WARNING: js/jsapi.h: proceeding with the preprocessor's result" >&2;} - { echo "$as_me:$LINENO: WARNING: js/jsapi.h: in the future, the compiler will take precedence" >&5 -echo "$as_me: WARNING: js/jsapi.h: in the future, the compiler will take precedence" >&2;} - ( cat <<\_ASBOX -## ------------------------------------------------------------ ## -## Report this to https://issues.apache.org/jira/browse/COUCHDB ## -## ------------------------------------------------------------ ## -_ASBOX - ) | sed "s/^/$as_me: WARNING: /" >&2 - ;; -esac -{ echo "$as_me:$LINENO: checking for js/jsapi.h" >&5 -echo $ECHO_N "checking for js/jsapi.h... $ECHO_C" >&6; } -if test "${ac_cv_header_js_jsapi_h+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 -else - ac_cv_header_js_jsapi_h=$ac_header_preproc -fi -{ echo "$as_me:$LINENO: result: $ac_cv_header_js_jsapi_h" >&5 -echo "${ECHO_T}$ac_cv_header_js_jsapi_h" >&6; } + +# Check whether --with-js-lib was given. +if test "${with_js_lib+set}" = set; then : + withval=$with_js_lib; + JS_LIB_DIR=$withval + JS_LIBS="-L$withval" fi -if test $ac_cv_header_js_jsapi_h = yes; then - CPPFLAGS="$CPPFLAGS -I$JS_INCLUDE/js" -else +use_js_trunk=no - { { echo "$as_me:$LINENO: error: Could not find the jsapi header. +# Check whether --enable-js-trunk was given. +if test "${enable_js_trunk+set}" = set; then : + enableval=$enable_js_trunk; + use_js_trunk=$enableval -Are the Mozilla SpiderMonkey headers installed?" >&5 -echo "$as_me: error: Could not find the jsapi header. 
+fi -Are the Mozilla SpiderMonkey headers installed?" >&2;} - { (exit 1); exit 1; }; } -fi -fi +case $(uname -s) in #( + CYGWIN*) : + ;; #( + *) : + CPPFLAGS="$CPPFLAGS -I/opt/local/include" + CPPFLAGS="$CPPFLAGS -I/opt/local/include/js" + CPPFLAGS="$CPPFLAGS -I/usr/local/include" + CPPFLAGS="$CPPFLAGS -I/usr/local/include/js" + CPPFLAGS="$CPPFLAGS -I/usr/include" + CPPFLAGS="$CPPFLAGS -I/usr/include/js" + LDFLAGS="$LDFLAGS -L/opt/local/lib" + LDFLAGS="$LDFLAGS -L/usr/local/lib" + ;; #( + *) : + ;; +esac +CPPFLAGS="$CPPFLAGS $FLAGS" +LDFLAGS="$LDFLAGS $FLAGS" -# Check whether --with-js-lib-name was given. -if test "${with_js_lib_name+set}" = set; then - withval=$with_js_lib_name; - JS_LIB_BASE="$withval" +# The erlang cc.sh/ld.sh scripts will convert a -O option +# into the same optimization flags erlang itself uses. +CFLAGS="-O2 $CFLAGS" -as_ac_Lib=`echo "ac_cv_lib_$JS_LIB_BASE''_JS_NewObject" | $as_tr_sh` -{ echo "$as_me:$LINENO: checking for JS_NewObject in -l$JS_LIB_BASE" >&5 -echo $ECHO_N "checking for JS_NewObject in -l$JS_LIB_BASE... $ECHO_C" >&6; } -if { as_var=$as_ac_Lib; eval "test \"\${$as_var+set}\" = set"; }; then - echo $ECHO_N "(cached) $ECHO_C" >&6 -else - ac_check_lib_save_LIBS=$LIBS -LIBS="-l$JS_LIB_BASE $LIBS" -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF -/* end confdefs.h. */ +LIBS="$LIBS $LDFLAGS" -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. 
*/ -#ifdef __cplusplus -extern "C" -#endif -char JS_NewObject (); -int -main () -{ -return JS_NewObject (); - ; - return 0; -} -_ACEOF -rm -f conftest.$ac_objext conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; +case $(uname -s) in #( + CYGWIN*) : + + JS_CFLAGS="-DXP_WIN $JS_CFLAGS" + IS_WINDOWS="TRUE" + ;; #( + *) : + + # XP_UNIX required for jsapi.h, tested on Linux and Darwin. + JS_CFLAGS="-DXP_UNIX $JS_CFLAGS" + ;; #( + *) : + ;; esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! -s conftest.err - } && test -s conftest$ac_exeext && - $as_test_x conftest$ac_exeext; then - eval "$as_ac_Lib=yes" -else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - eval "$as_ac_Lib=no" + if test x$IS_WINDOWS = xTRUE; then + WINDOWS_TRUE= + WINDOWS_FALSE='#' +else + WINDOWS_TRUE='#' + WINDOWS_FALSE= fi -rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ - conftest$ac_exeext conftest.$ac_ext -LIBS=$ac_check_lib_save_LIBS -fi -ac_res=`eval echo '${'$as_ac_Lib'}'` - { echo "$as_me:$LINENO: result: $ac_res" >&5 -echo "${ECHO_T}$ac_res" >&6; } -if test `eval echo '${'$as_ac_Lib'}'` = yes; then - cat >>confdefs.h <<_ACEOF -#define `echo "HAVE_LIB$JS_LIB_BASE" | $as_tr_cpp` 1 -_ACEOF - LIBS="-l$JS_LIB_BASE $LIBS" +OLD_LIBS="$LIBS" +LIBS="$JS_LIBS $LIBS" + +OLD_CPPFLAGS="$CPPFLAGS" +CPPFLAGS="$JS_CFLAGS $CPPFLAGS" + +ac_fn_cxx_check_header_mongrel "$LINENO" "jsapi.h" "ac_cv_header_jsapi_h" "$ac_includes_default" +if test "x$ac_cv_header_jsapi_h" = xyes; then : + +else + + ac_fn_cxx_check_header_mongrel "$LINENO" "js/jsapi.h" "ac_cv_header_js_jsapi_h" "$ac_includes_default" +if test 
"x$ac_cv_header_js_jsapi_h" = xyes; then : + + CPPFLAGS="$CPPFLAGS -I$JS_INCLUDE/js" else - { { echo "$as_me:$LINENO: error: Could not find the Spidermonkey library. + as_fn_error $? "Could not find the jsapi header. -Did you specify the correct library name?" >&5 -echo "$as_me: error: Could not find the Spidermonkey library. +Are the Mozilla SpiderMonkey headers installed?" "$LINENO" 5 -Did you specify the correct library name?" >&2;} - { (exit 1); exit 1; }; } fi -else - { echo "$as_me:$LINENO: checking for JS_NewObject in -lmozjs" >&5 -echo $ECHO_N "checking for JS_NewObject in -lmozjs... $ECHO_C" >&6; } -if test "${ac_cv_lib_mozjs_JS_NewObject+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +fi + + + +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for JS_NewContext in -lmozjs185" >&5 +$as_echo_n "checking for JS_NewContext in -lmozjs185... " >&6; } +if ${ac_cv_lib_mozjs185_JS_NewContext+:} false; then : + $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS -LIBS="-lmozjs $LIBS" -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF +LIBS="-lmozjs185 $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. @@ -17070,63 +17407,38 @@ #ifdef __cplusplus extern "C" #endif -char JS_NewObject (); +char JS_NewContext (); int main () { -return JS_NewObject (); +return JS_NewContext (); ; return 0; } _ACEOF -rm -f conftest.$ac_objext conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! 
-s conftest.err - } && test -s conftest$ac_exeext && - $as_test_x conftest$ac_exeext; then - ac_cv_lib_mozjs_JS_NewObject=yes +if ac_fn_cxx_try_link "$LINENO"; then : + ac_cv_lib_mozjs185_JS_NewContext=yes else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_cv_lib_mozjs_JS_NewObject=no + ac_cv_lib_mozjs185_JS_NewContext=no fi - -rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ - conftest$ac_exeext conftest.$ac_ext +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi -{ echo "$as_me:$LINENO: result: $ac_cv_lib_mozjs_JS_NewObject" >&5 -echo "${ECHO_T}$ac_cv_lib_mozjs_JS_NewObject" >&6; } -if test $ac_cv_lib_mozjs_JS_NewObject = yes; then - JS_LIB_BASE=mozjs +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_mozjs185_JS_NewContext" >&5 +$as_echo "$ac_cv_lib_mozjs185_JS_NewContext" >&6; } +if test "x$ac_cv_lib_mozjs185_JS_NewContext" = xyes; then : + JS_LIB_BASE=mozjs185 else - { echo "$as_me:$LINENO: checking for JS_NewObject in -ljs" >&5 -echo $ECHO_N "checking for JS_NewObject in -ljs... $ECHO_C" >&6; } -if test "${ac_cv_lib_js_JS_NewObject+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for JS_NewContext in -lmozjs185-1.0" >&5 +$as_echo_n "checking for JS_NewContext in -lmozjs185-1.0... " >&6; } +if ${ac_cv_lib_mozjs185_1_0_JS_NewContext+:} false; then : + $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS -LIBS="-ljs $LIBS" -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF +LIBS="-lmozjs185-1.0 $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. 
@@ -17135,63 +17447,38 @@ #ifdef __cplusplus extern "C" #endif -char JS_NewObject (); +char JS_NewContext (); int main () { -return JS_NewObject (); +return JS_NewContext (); ; return 0; } _ACEOF -rm -f conftest.$ac_objext conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! -s conftest.err - } && test -s conftest$ac_exeext && - $as_test_x conftest$ac_exeext; then - ac_cv_lib_js_JS_NewObject=yes +if ac_fn_cxx_try_link "$LINENO"; then : + ac_cv_lib_mozjs185_1_0_JS_NewContext=yes else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_cv_lib_js_JS_NewObject=no + ac_cv_lib_mozjs185_1_0_JS_NewContext=no fi - -rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ - conftest$ac_exeext conftest.$ac_ext +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi -{ echo "$as_me:$LINENO: result: $ac_cv_lib_js_JS_NewObject" >&5 -echo "${ECHO_T}$ac_cv_lib_js_JS_NewObject" >&6; } -if test $ac_cv_lib_js_JS_NewObject = yes; then - JS_LIB_BASE=js +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_mozjs185_1_0_JS_NewContext" >&5 +$as_echo "$ac_cv_lib_mozjs185_1_0_JS_NewContext" >&6; } +if test "x$ac_cv_lib_mozjs185_1_0_JS_NewContext" = xyes; then : + JS_LIB_BASE=mozjs185-1.0 else - { echo "$as_me:$LINENO: checking for JS_NewObject in -ljs3250" >&5 -echo $ECHO_N "checking for JS_NewObject in -ljs3250... 
$ECHO_C" >&6; } -if test "${ac_cv_lib_js3250_JS_NewObject+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for JS_NewContext in -lmozjs" >&5 +$as_echo_n "checking for JS_NewContext in -lmozjs... " >&6; } +if ${ac_cv_lib_mozjs_JS_NewContext+:} false; then : + $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS -LIBS="-ljs3250 $LIBS" -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF +LIBS="-lmozjs $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. @@ -17200,63 +17487,38 @@ #ifdef __cplusplus extern "C" #endif -char JS_NewObject (); +char JS_NewContext (); int main () { -return JS_NewObject (); +return JS_NewContext (); ; return 0; } _ACEOF -rm -f conftest.$ac_objext conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! 
-s conftest.err - } && test -s conftest$ac_exeext && - $as_test_x conftest$ac_exeext; then - ac_cv_lib_js3250_JS_NewObject=yes +if ac_fn_cxx_try_link "$LINENO"; then : + ac_cv_lib_mozjs_JS_NewContext=yes else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_cv_lib_js3250_JS_NewObject=no + ac_cv_lib_mozjs_JS_NewContext=no fi - -rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ - conftest$ac_exeext conftest.$ac_ext +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi -{ echo "$as_me:$LINENO: result: $ac_cv_lib_js3250_JS_NewObject" >&5 -echo "${ECHO_T}$ac_cv_lib_js3250_JS_NewObject" >&6; } -if test $ac_cv_lib_js3250_JS_NewObject = yes; then - JS_LIB_BASE=js3250 +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_mozjs_JS_NewContext" >&5 +$as_echo "$ac_cv_lib_mozjs_JS_NewContext" >&6; } +if test "x$ac_cv_lib_mozjs_JS_NewContext" = xyes; then : + JS_LIB_BASE=mozjs else - { echo "$as_me:$LINENO: checking for JS_NewObject in -ljs32" >&5 -echo $ECHO_N "checking for JS_NewObject in -ljs32... $ECHO_C" >&6; } -if test "${ac_cv_lib_js32_JS_NewObject+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for JS_NewContext in -ljs" >&5 +$as_echo_n "checking for JS_NewContext in -ljs... " >&6; } +if ${ac_cv_lib_js_JS_NewContext+:} false; then : + $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS -LIBS="-ljs32 $LIBS" -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF +LIBS="-ljs $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. 
@@ -17265,63 +17527,38 @@ #ifdef __cplusplus extern "C" #endif -char JS_NewObject (); +char JS_NewContext (); int main () { -return JS_NewObject (); +return JS_NewContext (); ; return 0; } _ACEOF -rm -f conftest.$ac_objext conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! -s conftest.err - } && test -s conftest$ac_exeext && - $as_test_x conftest$ac_exeext; then - ac_cv_lib_js32_JS_NewObject=yes +if ac_fn_cxx_try_link "$LINENO"; then : + ac_cv_lib_js_JS_NewContext=yes else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_cv_lib_js32_JS_NewObject=no + ac_cv_lib_js_JS_NewContext=no fi - -rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ - conftest$ac_exeext conftest.$ac_ext +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi -{ echo "$as_me:$LINENO: result: $ac_cv_lib_js32_JS_NewObject" >&5 -echo "${ECHO_T}$ac_cv_lib_js32_JS_NewObject" >&6; } -if test $ac_cv_lib_js32_JS_NewObject = yes; then - JS_LIB_BASE=js32 +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_js_JS_NewContext" >&5 +$as_echo "$ac_cv_lib_js_JS_NewContext" >&6; } +if test "x$ac_cv_lib_js_JS_NewContext" = xyes; then : + JS_LIB_BASE=js else - { echo "$as_me:$LINENO: checking for JS_NewObject in -lmozjs185-1.0" >&5 -echo $ECHO_N "checking for JS_NewObject in -lmozjs185-1.0... 
$ECHO_C" >&6; } -if test "${ac_cv_lib_mozjs185_1_0_JS_NewObject+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for JS_NewContext in -ljs3250" >&5 +$as_echo_n "checking for JS_NewContext in -ljs3250... " >&6; } +if ${ac_cv_lib_js3250_JS_NewContext+:} false; then : + $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS -LIBS="-lmozjs185-1.0 $LIBS" -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF +LIBS="-ljs3250 $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. @@ -17330,63 +17567,38 @@ #ifdef __cplusplus extern "C" #endif -char JS_NewObject (); +char JS_NewContext (); int main () { -return JS_NewObject (); +return JS_NewContext (); ; return 0; } _ACEOF -rm -f conftest.$ac_objext conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! 
-s conftest.err - } && test -s conftest$ac_exeext && - $as_test_x conftest$ac_exeext; then - ac_cv_lib_mozjs185_1_0_JS_NewObject=yes +if ac_fn_cxx_try_link "$LINENO"; then : + ac_cv_lib_js3250_JS_NewContext=yes else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_cv_lib_mozjs185_1_0_JS_NewObject=no + ac_cv_lib_js3250_JS_NewContext=no fi - -rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ - conftest$ac_exeext conftest.$ac_ext +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi -{ echo "$as_me:$LINENO: result: $ac_cv_lib_mozjs185_1_0_JS_NewObject" >&5 -echo "${ECHO_T}$ac_cv_lib_mozjs185_1_0_JS_NewObject" >&6; } -if test $ac_cv_lib_mozjs185_1_0_JS_NewObject = yes; then - JS_LIB_BASE=mozjs185-1.0 +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_js3250_JS_NewContext" >&5 +$as_echo "$ac_cv_lib_js3250_JS_NewContext" >&6; } +if test "x$ac_cv_lib_js3250_JS_NewContext" = xyes; then : + JS_LIB_BASE=js3250 else - { echo "$as_me:$LINENO: checking for JS_NewObject in -lmozjs185" >&5 -echo $ECHO_N "checking for JS_NewObject in -lmozjs185... $ECHO_C" >&6; } -if test "${ac_cv_lib_mozjs185_JS_NewObject+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for JS_NewContext in -ljs32" >&5 +$as_echo_n "checking for JS_NewContext in -ljs32... " >&6; } +if ${ac_cv_lib_js32_JS_NewContext+:} false; then : + $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS -LIBS="-lmozjs185 $LIBS" -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF +LIBS="-ljs32 $LIBS" +cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. 
@@ -17395,68 +17607,46 @@ #ifdef __cplusplus extern "C" #endif -char JS_NewObject (); +char JS_NewContext (); int main () { -return JS_NewObject (); +return JS_NewContext (); ; return 0; } _ACEOF -rm -f conftest.$ac_objext conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! -s conftest.err - } && test -s conftest$ac_exeext && - $as_test_x conftest$ac_exeext; then - ac_cv_lib_mozjs185_JS_NewObject=yes +if ac_fn_cxx_try_link "$LINENO"; then : + ac_cv_lib_js32_JS_NewContext=yes else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_cv_lib_mozjs185_JS_NewObject=no + ac_cv_lib_js32_JS_NewContext=no fi - -rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ - conftest$ac_exeext conftest.$ac_ext +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi -{ echo "$as_me:$LINENO: result: $ac_cv_lib_mozjs185_JS_NewObject" >&5 -echo "${ECHO_T}$ac_cv_lib_mozjs185_JS_NewObject" >&6; } -if test $ac_cv_lib_mozjs185_JS_NewObject = yes; then - JS_LIB_BASE=mozjs185 +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_js32_JS_NewContext" >&5 +$as_echo "$ac_cv_lib_js32_JS_NewContext" >&6; } +if test "x$ac_cv_lib_js32_JS_NewContext" = xyes; then : + JS_LIB_BASE=js32 else - { { echo "$as_me:$LINENO: error: Could not find the js library. + as_fn_error $? "Could not find the js library. -Is the Mozilla SpiderMonkey library installed?" >&5 -echo "$as_me: error: Could not find the js library. +Is the Mozilla SpiderMonkey library installed?" 
"$LINENO" 5 -Is the Mozilla SpiderMonkey library installed?" >&2;} - { (exit 1); exit 1; }; } fi -fi fi + fi + fi + fi @@ -17465,19 +17655,15 @@ # Figure out what version of SpiderMonkey to use -as_ac_Lib=`echo "ac_cv_lib_$JS_LIB_BASE''_JS_NewCompartmentAndGlobalObject" | $as_tr_sh` -{ echo "$as_me:$LINENO: checking for JS_NewCompartmentAndGlobalObject in -l$JS_LIB_BASE" >&5 -echo $ECHO_N "checking for JS_NewCompartmentAndGlobalObject in -l$JS_LIB_BASE... $ECHO_C" >&6; } -if { as_var=$as_ac_Lib; eval "test \"\${$as_var+set}\" = set"; }; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +as_ac_Lib=`$as_echo "ac_cv_lib_$JS_LIB_BASE''_JS_NewCompartmentAndGlobalObject" | $as_tr_sh` +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for JS_NewCompartmentAndGlobalObject in -l$JS_LIB_BASE" >&5 +$as_echo_n "checking for JS_NewCompartmentAndGlobalObject in -l$JS_LIB_BASE... " >&6; } +if eval \${$as_ac_Lib+:} false; then : + $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-l$JS_LIB_BASE $LIBS" -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF +cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. @@ -17495,145 +17681,103 @@ return 0; } _ACEOF -rm -f conftest.$ac_objext conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! 
-s conftest.err - } && test -s conftest$ac_exeext && - $as_test_x conftest$ac_exeext; then +if ac_fn_cxx_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - eval "$as_ac_Lib=no" + eval "$as_ac_Lib=no" fi - -rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ - conftest$ac_exeext conftest.$ac_ext +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi -ac_res=`eval echo '${'$as_ac_Lib'}'` - { echo "$as_me:$LINENO: result: $ac_res" >&5 -echo "${ECHO_T}$ac_res" >&6; } -if test `eval echo '${'$as_ac_Lib'}'` = yes; then +eval ac_res=\$$as_ac_Lib + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : # Prevent people from accidentally using SpiderMonkey's that are too new - if test "$use_js_trunk" = "no"; then - { echo "$as_me:$LINENO: checking whether JSOPTION_ANONFUNFIX is declared" >&5 -echo $ECHO_N "checking whether JSOPTION_ANONFUNFIX is declared... $ECHO_C" >&6; } -if test "${ac_cv_have_decl_JSOPTION_ANONFUNFIX+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + +# ac_fn_cxx_check_decl LINENO SYMBOL VAR INCLUDES +# ----------------------------------------------- +# Tests whether SYMBOL is declared in INCLUDES, setting cache variable VAR +# accordingly. +ac_fn_cxx_check_decl () +{ + as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + as_decl_name=`echo $2|sed 's/ *(.*//'` + as_decl_use=`echo $2|sed -e 's/(/((/' -e 's/)/) 0&/' -e 's/,/) 0& (/g'` + { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $as_decl_name is declared" >&5 +$as_echo_n "checking whether $as_decl_name is declared... " >&6; } +if eval \${$3+:} false; then : + $as_echo_n "(cached) " >&6 else - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. 
*/ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF + cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ -#include - +$4 int main () { -#ifndef JSOPTION_ANONFUNFIX - (void) JSOPTION_ANONFUNFIX; +#ifndef $as_decl_name +#ifdef __cplusplus + (void) $as_decl_use; +#else + (void) $as_decl_name; +#endif #endif ; return 0; } _ACEOF -rm -f conftest.$ac_objext -if { (ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compile") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! -s conftest.err - } && test -s conftest.$ac_objext; then - ac_cv_have_decl_JSOPTION_ANONFUNFIX=yes +if ac_fn_cxx_try_compile "$LINENO"; then : + eval "$3=yes" else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_cv_have_decl_JSOPTION_ANONFUNFIX=no + eval "$3=no" fi - rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi -{ echo "$as_me:$LINENO: result: $ac_cv_have_decl_JSOPTION_ANONFUNFIX" >&5 -echo "${ECHO_T}$ac_cv_have_decl_JSOPTION_ANONFUNFIX" >&6; } -if test $ac_cv_have_decl_JSOPTION_ANONFUNFIX = yes; then - : -else - - { { echo "$as_me:$LINENO: error: Your SpiderMonkey library is too new. +eval ac_res=\$$3 + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } + eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno + +} # ac_fn_cxx_check_decl +ac_fn_cxx_check_decl "$LINENO" "JSOPTION_ANONFUNFIX" "ac_cv_have_decl_JSOPTION_ANONFUNFIX" " + #include -NOTE: Check above for an error about NSPR - -Versions of SpiderMonkey after the js185-1.0.0 release remove the optional -enforcement of preventing anonymous functions in a statement context. 
This -will most likely break your existing JavaScript code as well as render all -example code invalid. +" +if test "x$ac_cv_have_decl_JSOPTION_ANONFUNFIX" = xyes; then : -If you wish to ignore this error pass --enable-js-trunk to ./configure." >&5 -echo "$as_me: error: Your SpiderMonkey library is too new. +else -NOTE: Check above for an error about NSPR + as_fn_error $? "Your SpiderMonkey library is too new. Versions of SpiderMonkey after the js185-1.0.0 release remove the optional enforcement of preventing anonymous functions in a statement context. This will most likely break your existing JavaScript code as well as render all example code invalid. -If you wish to ignore this error pass --enable-js-trunk to ./configure." >&2;} - { (exit 1); exit 1; }; } +If you wish to ignore this error pass --enable-js-trunk to ./configure." "$LINENO" 5 + fi fi -cat >>confdefs.h <<\_ACEOF -#define SM185 1 -_ACEOF +$as_echo "#define SM185 1" >>confdefs.h + fi -as_ac_Lib=`echo "ac_cv_lib_$JS_LIB_BASE''_JS_ThrowStopIteration" | $as_tr_sh` -{ echo "$as_me:$LINENO: checking for JS_ThrowStopIteration in -l$JS_LIB_BASE" >&5 -echo $ECHO_N "checking for JS_ThrowStopIteration in -l$JS_LIB_BASE... $ECHO_C" >&6; } -if { as_var=$as_ac_Lib; eval "test \"\${$as_var+set}\" = set"; }; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +as_ac_Lib=`$as_echo "ac_cv_lib_$JS_LIB_BASE''_JS_ThrowStopIteration" | $as_tr_sh` +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for JS_ThrowStopIteration in -l$JS_LIB_BASE" >&5 +$as_echo_n "checking for JS_ThrowStopIteration in -l$JS_LIB_BASE... " >&6; } +if eval \${$as_ac_Lib+:} false; then : + $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-l$JS_LIB_BASE $LIBS" -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF +cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. 
@@ -17651,61 +17795,35 @@ return 0; } _ACEOF -rm -f conftest.$ac_objext conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! -s conftest.err - } && test -s conftest$ac_exeext && - $as_test_x conftest$ac_exeext; then +if ac_fn_cxx_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - eval "$as_ac_Lib=no" + eval "$as_ac_Lib=no" fi - -rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ - conftest$ac_exeext conftest.$ac_ext +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi -ac_res=`eval echo '${'$as_ac_Lib'}'` - { echo "$as_me:$LINENO: result: $ac_res" >&5 -echo "${ECHO_T}$ac_res" >&6; } -if test `eval echo '${'$as_ac_Lib'}'` = yes; then +eval ac_res=\$$as_ac_Lib + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : + +$as_echo "#define SM180 1" >>confdefs.h -cat >>confdefs.h <<\_ACEOF -#define SM180 1 -_ACEOF fi -as_ac_Lib=`echo "ac_cv_lib_$JS_LIB_BASE''_JS_GetStringCharsAndLength" | $as_tr_sh` -{ echo "$as_me:$LINENO: checking for JS_GetStringCharsAndLength in -l$JS_LIB_BASE" >&5 -echo $ECHO_N "checking for JS_GetStringCharsAndLength in -l$JS_LIB_BASE... 
$ECHO_C" >&6; } -if { as_var=$as_ac_Lib; eval "test \"\${$as_var+set}\" = set"; }; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +as_ac_Lib=`$as_echo "ac_cv_lib_$JS_LIB_BASE''_JS_GetStringCharsAndLength" | $as_tr_sh` +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for JS_GetStringCharsAndLength in -l$JS_LIB_BASE" >&5 +$as_echo_n "checking for JS_GetStringCharsAndLength in -l$JS_LIB_BASE... " >&6; } +if eval \${$as_ac_Lib+:} false; then : + $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-l$JS_LIB_BASE $LIBS" -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF +cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. @@ -17723,135 +17841,52 @@ return 0; } _ACEOF -rm -f conftest.$ac_objext conftest$ac_exeext -if { (ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_link") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! 
-s conftest.err - } && test -s conftest$ac_exeext && - $as_test_x conftest$ac_exeext; then +if ac_fn_cxx_try_link "$LINENO"; then : eval "$as_ac_Lib=yes" else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - eval "$as_ac_Lib=no" + eval "$as_ac_Lib=no" fi - -rm -f core conftest.err conftest.$ac_objext conftest_ipa8_conftest.oo \ - conftest$ac_exeext conftest.$ac_ext +rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi -ac_res=`eval echo '${'$as_ac_Lib'}'` - { echo "$as_me:$LINENO: result: $ac_res" >&5 -echo "${ECHO_T}$ac_res" >&6; } -if test `eval echo '${'$as_ac_Lib'}'` = yes; then +eval ac_res=\$$as_ac_Lib + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 +$as_echo "$ac_res" >&6; } +if eval test \"x\$"$as_ac_Lib"\" = x"yes"; then : + +$as_echo "#define HAVE_JS_GET_STRING_CHARS_AND_LENGTH 1" >>confdefs.h -cat >>confdefs.h <<\_ACEOF -#define HAVE_JS_GET_STRING_CHARS_AND_LENGTH 1 -_ACEOF fi # Else, hope that 1.7.0 works -# Deal with JSScript -> JSObject -> JSScript switcheroo - -{ echo "$as_me:$LINENO: checking for JSScript*" >&5 -echo $ECHO_N "checking for JSScript*... $ECHO_C" >&6; } -if test "${ac_cv_type_JSScriptp+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 -else - cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF -/* end confdefs.h. */ -#include - - -typedef JSScript* ac__type_new_; -int -main () -{ -if ((ac__type_new_ *) 0) - return 0; -if (sizeof (ac__type_new_)) - return 0; - ; - return 0; -} -_ACEOF -rm -f conftest.$ac_objext -if { (ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compile") 2>conftest.er1 - ac_status=$? 
- grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! -s conftest.err - } && test -s conftest.$ac_objext; then - ac_cv_type_JSScriptp=yes -else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 +# Deal with JSScript to JSObject to JSScript switcheroo - ac_cv_type_JSScriptp=no -fi +as_ac_Type=`$as_echo "ac_cv_type_JSScript*" | $as_tr_sh` +ac_fn_cxx_check_type "$LINENO" "JSScript*" "$as_ac_Type" "#include -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -{ echo "$as_me:$LINENO: result: $ac_cv_type_JSScriptp" >&5 -echo "${ECHO_T}$ac_cv_type_JSScriptp" >&6; } -if test $ac_cv_type_JSScriptp = yes; then +" +if eval test \"x\$"$as_ac_Type"\" = x"yes"; then : -cat >>confdefs.h <<\_ACEOF -#define JSSCRIPT_TYPE JSScript* -_ACEOF +$as_echo "#define JSSCRIPT_TYPE JSScript*" >>confdefs.h else -cat >>confdefs.h <<\_ACEOF -#define JSSCRIPT_TYPE JSObject* -_ACEOF +$as_echo "#define JSSCRIPT_TYPE JSObject*" >>confdefs.h fi -cat >>confdefs.h <<\_ACEOF -#define COUCHJS_NAME "couchjs" -_ACEOF +$as_echo "#define COUCHJS_NAME \"couchjs\"" >>confdefs.h if test x${IS_WINDOWS} = xTRUE; then - -cat >>confdefs.h <<\_ACEOF -#define COUCHJS_NAME "couchjs.exe" -_ACEOF - +$as_echo "#define COUCHJS_NAME \"couchjs.exe\"" >>confdefs.h if test -f "$JS_LIB_DIR/$JS_LIB_BASE.dll"; then # seamonkey 1.7- build layout on Windows @@ -17861,31 +17896,27 @@ if test -f "$JS_LIB_DIR/../bin/$JS_LIB_BASE.dll"; then JS_LIB_BINARY="$JS_LIB_DIR/../bin/$JS_LIB_BASE.dll" else - { { echo "$as_me:$LINENO: error: Could not find $JS_LIB_BASE.dll." >&5 -echo "$as_me: error: Could not find $JS_LIB_BASE.dll." >&2;} - { (exit 1); exit 1; }; } + as_fn_error $? "Could not find $JS_LIB_BASE.dll." "$LINENO" 5 fi fi - # On windows we need to know the path to the openssl binaries. # Check whether --with-openssl-bin-dir was given. 
-if test "${with_openssl_bin_dir+set}" = set; then +if test "${with_openssl_bin_dir+set}" = set; then : withval=$with_openssl_bin_dir; openssl_bin_dir=`cygpath -m "$withval"` fi - # Windows uses Inno setup - look for its compiler. # Extract the first word of "iscc", so it can be a program name with args. set dummy iscc; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_path_INNO_COMPILER_EXECUTABLE+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_path_INNO_COMPILER_EXECUTABLE+:} false; then : + $as_echo_n "(cached) " >&6 else case $INNO_COMPILER_EXECUTABLE in [\\/]* | ?:[\\/]*) @@ -17897,14 +17928,14 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_INNO_COMPILER_EXECUTABLE="$as_dir/$ac_word$ac_exec_ext" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS ;; @@ -17912,26 +17943,25 @@ fi INNO_COMPILER_EXECUTABLE=$ac_cv_path_INNO_COMPILER_EXECUTABLE if test -n "$INNO_COMPILER_EXECUTABLE"; then - { echo "$as_me:$LINENO: result: $INNO_COMPILER_EXECUTABLE" >&5 -echo "${ECHO_T}$INNO_COMPILER_EXECUTABLE" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $INNO_COMPILER_EXECUTABLE" >&5 +$as_echo "$INNO_COMPILER_EXECUTABLE" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi if test x${INNO_COMPILER_EXECUTABLE} = x; then - { 
echo "$as_me:$LINENO: WARNING: You will be unable to build the Windows installer." >&5 -echo "$as_me: WARNING: You will be unable to build the Windows installer." >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: You will be unable to build the Windows installer." >&5 +$as_echo "$as_me: WARNING: You will be unable to build the Windows installer." >&2;} fi - # We need the msvc redistributables for this platform too # (in theory we could just install the assembly locally - but # there are at least 4 directories with binaries, meaning 4 copies; # so using the redist .exe means it ends up installed globally...) # Check whether --with-msvc-redist-dir was given. -if test "${with_msvc_redist_dir+set}" = set; then +if test "${with_msvc_redist_dir+set}" = set; then : withval=$with_msvc_redist_dir; msvc_redist_dir=`cygpath -m "$withval"` msvc_redist_name="vcredist_x86.exe" @@ -17941,25 +17971,23 @@ fi if test ! -f ${msvc_redist_dir}/${msvc_redist_name}; then - { echo "$as_me:$LINENO: WARNING: The MSVC redistributable seems to be missing; expect the installer to fail." >&5 -echo "$as_me: WARNING: The MSVC redistributable seems to be missing; expect the installer to fail." >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Installer may fail due to missing MSVC redistributable." >&5 +$as_echo "$as_me: WARNING: Installer may fail due to missing MSVC redistributable." >&2;} fi fi -JS_CFLAGS="$CPPFLAGS" -JS_LDFLAGS="$LDFLAGS" -JS_LIBS="-l$JS_LIB_BASE -lm $LIBS" -CPPFLAGS="$OLD_CPPFLAGS" -LDFLAGS="$OLD_LDFLAGS" -LIBS="$OLD_LIBS" - +JS_LIBS="-l$JS_LIB_BASE -lm $JS_LIBS" +LIBS="$OLD_LIBS" +CPPFLAGS="$OLD_CPPFLAGS" +%% auto detect "kegged" icu4c on Mac / Homebrew +PATH="/usr/local/opt/icu4c/bin:$PATH" # Check whether --with-win32-icu-binaries was given. 
-if test "${with_win32_icu_binaries+set}" = set; then +if test "${with_win32_icu_binaries+set}" = set; then : withval=$with_win32_icu_binaries; ICU_CPPFLAGS="-I$withval/include" ICU_LIBS="-L$withval/lib -licuuc -licudt -licuin" @@ -17973,10 +18001,10 @@ if test -z "$ICU_CONFIG"; then # Extract the first word of "icu-config", so it can be a program name with args. set dummy icu-config; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_path_ICU_CONFIG+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_path_ICU_CONFIG+:} false; then : + $as_echo_n "(cached) " >&6 else case $ICU_CONFIG in [\\/]* | ?:[\\/]*) @@ -17988,14 +18016,14 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_ICU_CONFIG="$as_dir/$ac_word$ac_exec_ext" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS test -z "$ac_cv_path_ICU_CONFIG" && ac_cv_path_ICU_CONFIG="no" @@ -18004,11 +18032,11 @@ fi ICU_CONFIG=$ac_cv_path_ICU_CONFIG if test -n "$ICU_CONFIG"; then - { echo "$as_me:$LINENO: result: $ICU_CONFIG" >&5 -echo "${ECHO_T}$ICU_CONFIG" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ICU_CONFIG" >&5 +$as_echo "$ICU_CONFIG" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi @@ -18020,32 +18048,39 @@ echo "*** Or see 
http://ibm.com/software/globalization/icu/" else ICU_VERSION=`$ICU_CONFIG --version` - { echo "$as_me:$LINENO: checking for ICU >= 3.4.1" >&5 -echo $ECHO_N "checking for ICU >= 3.4.1... $ECHO_C" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ICU >= 3.4.1" >&5 +$as_echo_n "checking for ICU >= 3.4.1... " >&6; } VERSION_CHECK=`expr $ICU_VERSION \>\= 3.4.1` if test "$VERSION_CHECK" = "1" ; then - { echo "$as_me:$LINENO: result: yes" >&5 -echo "${ECHO_T}yes" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } succeeded=yes - { echo "$as_me:$LINENO: checking ICU_CFLAGS" >&5 -echo $ECHO_N "checking ICU_CFLAGS... $ECHO_C" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: checking ICU_CPPFLAGS" >&5 +$as_echo_n "checking ICU_CPPFLAGS... " >&6; } + ICU_CPPFLAGS=`$ICU_CONFIG --cppflags` + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ICU_CPPFLAGS" >&5 +$as_echo "$ICU_CPPFLAGS" >&6; } + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking ICU_CFLAGS" >&5 +$as_echo_n "checking ICU_CFLAGS... " >&6; } ICU_CFLAGS=`$ICU_CONFIG --cflags` - { echo "$as_me:$LINENO: result: $ICU_CFLAGS" >&5 -echo "${ECHO_T}$ICU_CFLAGS" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ICU_CFLAGS" >&5 +$as_echo "$ICU_CFLAGS" >&6; } - { echo "$as_me:$LINENO: checking ICU_CXXFLAGS" >&5 -echo $ECHO_N "checking ICU_CXXFLAGS... $ECHO_C" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: checking ICU_CXXFLAGS" >&5 +$as_echo_n "checking ICU_CXXFLAGS... " >&6; } ICU_CXXFLAGS=`$ICU_CONFIG --cxxflags` - { echo "$as_me:$LINENO: result: $ICU_CXXFLAGS" >&5 -echo "${ECHO_T}$ICU_CXXFLAGS" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ICU_CXXFLAGS" >&5 +$as_echo "$ICU_CXXFLAGS" >&6; } - { echo "$as_me:$LINENO: checking ICU_LIBS" >&5 -echo $ECHO_N "checking ICU_LIBS... $ECHO_C" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: checking ICU_LIBS" >&5 +$as_echo_n "checking ICU_LIBS... 
" >&6; } ICU_LIBS=`$ICU_CONFIG --ldflags` - { echo "$as_me:$LINENO: result: $ICU_LIBS" >&5 -echo "${ECHO_T}$ICU_LIBS" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ICU_LIBS" >&5 +$as_echo "$ICU_LIBS" >&6; } else + ICU_CPPFLAGS="" ICU_CFLAGS="" ICU_CXXFLAGS="" ICU_LIBS="" @@ -18057,14 +18092,13 @@ + fi if test $succeeded = yes; then : else - { { echo "$as_me:$LINENO: error: Library requirements (ICU) not met." >&5 -echo "$as_me: error: Library requirements (ICU) not met." >&2;} - { (exit 1); exit 1; }; } + as_fn_error $? "Library requirements (ICU) not met." "$LINENO" 5 fi ICU_BIN= @@ -18079,8 +18113,9 @@ use_curl=yes + # Check whether --with-win32-curl was given. -if test "${with_win32_curl+set}" = set; then +if test "${with_win32_curl+set}" = set; then : withval=$with_win32_curl; # default build on windows is a static lib, and that's what we want too CURL_CFLAGS="-I$withval/include -DCURL_STATICLIB" @@ -18091,15 +18126,73 @@ else - succeeded=no - if test -z "$CURL_CONFIG"; then - # Extract the first word of "curl-config", so it can be a program name with args. -set dummy curl-config; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_path_CURL_CONFIG+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 + + + + +# Check whether --with-curl-prefix was given. +if test "${with_curl_prefix+set}" = set; then : + withval=$with_curl_prefix; curl_config_prefix="$withval" +else + curl_config_prefix="" +fi + + +# Check whether --with-curl-exec-prefix was given. 
+if test "${with_curl_exec_prefix+set}" = set; then : + withval=$with_curl_exec_prefix; curl_config_exec_prefix="$withval" +else + curl_config_exec_prefix="" +fi + + + + + + + if test x$CURL_CFLAGS != x -o x$CURL_LIBS != x; then : + + + + : + + +$as_echo "#define HAVE_CURL 1" >>confdefs.h + + + +else + + if test x$curl_config_exec_prefix != x; then : + + curl_config_args="$curl_config_args --exec-prefix=$curl_config_exec_prefix" + if test x${CURL_CONFIG+set} != xset; then : + + CURL_CONFIG=$curl_config_exec_prefix/bin/curl-config + +fi + +fi + if test x$curl_config_prefix != x; then : + + curl_config_args="$curl_config_args --prefix=$curl_config_prefix" + if test x${CURL_CONFIG+set} != xset; then : + + CURL_CONFIG=$curl_config_prefix/bin/curl-config + +fi + +fi + + for ac_prog in curl-config +do + # Extract the first word of "$ac_prog", so it can be a program name with args. +set dummy $ac_prog; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_path_CURL_CONFIG+:} false; then : + $as_echo_n "(cached) " >&6 else case $CURL_CONFIG in [\\/]* | ?:[\\/]*) @@ -18111,83 +18204,158 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_CURL_CONFIG="$as_dir/$ac_word$ac_exec_ext" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi -done -done -IFS=$as_save_IFS +done + done +IFS=$as_save_IFS + + ;; +esac +fi +CURL_CONFIG=$ac_cv_path_CURL_CONFIG +if test -n "$CURL_CONFIG"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CURL_CONFIG" >&5 +$as_echo "$CURL_CONFIG" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + test -n "$CURL_CONFIG" && break +done +test -n "$CURL_CONFIG" || CURL_CONFIG="no" + + if test "$CURL_CONFIG" == "no"; then : + + : + + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: You will be unable to run some JavaScript unit tests." >&5 +$as_echo "$as_me: WARNING: You will be unable to run some JavaScript unit tests." >&2;} + use_curl=no + CURL_LIBS= + + +else + + if test x"" == x; then : + + CURL_CFLAGS="`$CURL_CONFIG $curl_config_args --cflags`" + +else + + CURL_CFLAGS="`$CURL_CONFIG $curl_config_args `" + +fi + + if test x"" == x; then : + + CURL_LIBS="`$CURL_CONFIG $curl_config_args --libs`" + +else + + CURL_LIBS="`$CURL_CONFIG $curl_config_args `" + +fi + + if test x"7.18.0" != x; then : + + if test x"'s/^libcurl\ \+//'" != x; then : + + curl_version="`$CURL_CONFIG $curl_config_args --version | $SED -e 's/^libcurl\ \+//'`" + +else + + curl_version="`$CURL_CONFIG $curl_config_args --version | $SED -e 's/^\ *\(.*\)\ *$/\1/'`" + +fi + + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for curl ($curl_version) >= 7.18.0" >&5 +$as_echo_n "checking for curl ($curl_version) >= 7.18.0... 
" >&6; } + + + + # Used to indicate true or false condition + ax_compare_version=false + + # Convert the two version strings to be compared into a format that + # allows a simple string comparison. The end result is that a version + # string of the form 1.12.5-r617 will be converted to the form + # 0001001200050617. In other words, each number is zero padded to four + # digits, and non digits are removed. + + ax_compare_version_A=`echo "$curl_version" | sed -e 's/\([0-9]*\)/Z\1Z/g' \ + -e 's/Z\([0-9]\)Z/Z0\1Z/g' \ + -e 's/Z\([0-9][0-9]\)Z/Z0\1Z/g' \ + -e 's/Z\([0-9][0-9][0-9]\)Z/Z0\1Z/g' \ + -e 's/[^0-9]//g'` + + + ax_compare_version_B=`echo "7.18.0" | sed -e 's/\([0-9]*\)/Z\1Z/g' \ + -e 's/Z\([0-9]\)Z/Z0\1Z/g' \ + -e 's/Z\([0-9][0-9]\)Z/Z0\1Z/g' \ + -e 's/Z\([0-9][0-9][0-9]\)Z/Z0\1Z/g' \ + -e 's/[^0-9]//g'` + + + ax_compare_version=`echo "x$ax_compare_version_A +x$ax_compare_version_B" | sed 's/^ *//' | sort -r | sed "s/x${ax_compare_version_A}/true/;s/x${ax_compare_version_B}/false/;1q"` + + + + if test "$ax_compare_version" = "true" ; then + + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + + + + : + + +$as_echo "#define HAVE_CURL 1" >>confdefs.h + + + + else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + : + + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: You will be unable to run some JavaScript unit tests." >&5 +$as_echo "$as_me: WARNING: You will be unable to run some JavaScript unit tests." >&2;} + use_curl=no + CURL_LIBS= + + + fi + - test -z "$ac_cv_path_CURL_CONFIG" && ac_cv_path_CURL_CONFIG="no" - ;; -esac -fi -CURL_CONFIG=$ac_cv_path_CURL_CONFIG -if test -n "$CURL_CONFIG"; then - { echo "$as_me:$LINENO: result: $CURL_CONFIG" >&5 -echo "${ECHO_T}$CURL_CONFIG" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } -fi - fi - if test "$CURL_CONFIG" = "no" ; then - echo "*** The curl-config script could not be found. 
Make sure it is" - echo "*** in your path, and that curl is properly installed." - echo "*** Or see http://curl.haxx.se/" - else - CURL_VERSION=`$CURL_CONFIG --version | cut -d" " -f2` - { echo "$as_me:$LINENO: checking for curl >= 7.18.0" >&5 -echo $ECHO_N "checking for curl >= 7.18.0... $ECHO_C" >&6; } - VERSION_CHECK=`expr $CURL_VERSION \>\= 7.18.0` - if test "$VERSION_CHECK" = "1" ; then - { echo "$as_me:$LINENO: result: yes" >&5 -echo "${ECHO_T}yes" >&6; } - succeeded=yes + : - { echo "$as_me:$LINENO: checking CURL_CFLAGS" >&5 -echo $ECHO_N "checking CURL_CFLAGS... $ECHO_C" >&6; } - CURL_CFLAGS=`$CURL_CONFIG --cflags` - { echo "$as_me:$LINENO: result: $CURL_CFLAGS" >&5 -echo "${ECHO_T}$CURL_CFLAGS" >&6; } - - { echo "$as_me:$LINENO: checking CURL_LIBS" >&5 -echo $ECHO_N "checking CURL_LIBS... $ECHO_C" >&6; } - CURL_LIBS=`$CURL_CONFIG --libs` - { echo "$as_me:$LINENO: result: $CURL_LIBS" >&5 -echo "${ECHO_T}$CURL_LIBS" >&6; } - else - CURL_CFLAGS="" - CURL_LIBS="" - ## If we have a custom action on failure, don't print errors, but - ## do set a variable so people can do so. - fi +$as_echo "#define HAVE_CURL 1" >>confdefs.h - fi +fi - if test $succeeded = yes; then +fi + +fi -cat >>confdefs.h <<\_ACEOF -#define HAVE_CURL 1 -_ACEOF - else - { echo "$as_me:$LINENO: WARNING: You will be unable to run some JavaScript unit tests." >&5 -echo "$as_me: WARNING: You will be unable to run some JavaScript unit tests." >&2;} - use_curl=no - CURL_LIBS= - fi fi @@ -18210,31 +18378,32 @@ esac -erlang_version_error="The installed Erlang version is less than 5.6.5 (R12B05)." +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking Erlang version compatibility" >&5 +$as_echo_n "checking Erlang version compatibility... " >&6; } +erlang_version_error="The installed Erlang version must be >= R14B (erts-5.8.1) and &5 -echo "$as_me: error: $erlang_version_error" >&2;} - { (exit 1); exit 1; }; } +if test $major_version -ne 5; then + as_fn_error $? 
"$erlang_version_error" "$LINENO" 5 fi -if test `echo $version | ${AWK} "{print \\$2}"` -lt 6; then - { { echo "$as_me:$LINENO: error: $erlang_version_error" >&5 -echo "$as_me: error: $erlang_version_error" >&2;} - { (exit 1); exit 1; }; } +if test $minor_version -lt 8 -o $minor_version -gt 10; then + as_fn_error $? "$erlang_version_error" "$LINENO" 5 fi -if test `echo $version | ${AWK} "{print \\$2}"` -eq 6; then - if test `echo $version | ${AWK} "{print \\$3}"` -lt 5; then - { { echo "$as_me:$LINENO: error: $erlang_version_error" >&5 -echo "$as_me: error: $erlang_version_error" >&2;} - { (exit 1); exit 1; }; } - fi -fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: result: compatible" >&5 +$as_echo "compatible" >&6; } + +otp_release="`\ + ${ERL} -noshell \ + -eval 'io:put_chars(erlang:system_info(otp_release)).' \ + -s erlang halt`" + -otp_release="`${ERL} -noshell -eval 'io:put_chars(erlang:system_info(otp_release)).' -s erlang halt`" if test x$otp_release \> xR13B03; then USE_OTP_NIFS_TRUE= @@ -18253,20 +18422,23 @@ fi -has_crypto=`${ERL} -eval "case application:load(crypto) of ok -> ok; _ -> exit(no_crypto) end." -noshell -s init stop` +has_crypto=`\ + ${ERL} -eval "\ + case application:load(crypto) of ok -> ok; _ -> exit(no_crypto) end. \ + " -noshell -s init stop` if test -n "$has_crypto"; then - { { echo "$as_me:$LINENO: error: Could not find the Erlang crypto library. Has Erlang been compiled with OpenSSL support?" >&5 -echo "$as_me: error: Could not find the Erlang crypto library. Has Erlang been compiled with OpenSSL support?" >&2;} - { (exit 1); exit 1; }; } + as_fn_error $? "Could not find the Erlang crypto library. + +Has Erlang been compiled with OpenSSL support?" "$LINENO" 5 fi # Extract the first word of "erlc", so it can be a program name with args. set dummy erlc; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... 
$ECHO_C" >&6; } -if test "${ac_cv_path_ERLC+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_path_ERLC+:} false; then : + $as_echo_n "(cached) " >&6 else case $ERLC in [\\/]* | ?:[\\/]*) @@ -18278,14 +18450,14 @@ do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_ERLC="$as_dir/$ac_word$ac_exec_ext" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS ;; @@ -18293,300 +18465,480 @@ fi ERLC=$ac_cv_path_ERLC if test -n "$ERLC"; then - { echo "$as_me:$LINENO: result: $ERLC" >&5 -echo "${ECHO_T}$ERLC" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ERLC" >&5 +$as_echo "$ERLC" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi if test x${ERLC} = x; then - { { echo "$as_me:$LINENO: error: Could not find the \`erlc' executable. Is Erlang installed?" >&5 -echo "$as_me: error: Could not find the \`erlc' executable. Is Erlang installed?" >&2;} - { (exit 1); exit 1; }; } + as_fn_error $? "Could not find the \`erlc' executable. + +Is Erlang installed?" "$LINENO" 5 fi OLD_CPPFLAGS="$CPPFLAGS" CPPFLAGS="$ERLANG_FLAGS $CPPFLAGS" -if test "${ac_cv_header_erl_driver_h+set}" = set; then - { echo "$as_me:$LINENO: checking for erl_driver.h" >&5 -echo $ECHO_N "checking for erl_driver.h... 
$ECHO_C" >&6; } -if test "${ac_cv_header_erl_driver_h+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 -fi -{ echo "$as_me:$LINENO: result: $ac_cv_header_erl_driver_h" >&5 -echo "${ECHO_T}$ac_cv_header_erl_driver_h" >&6; } -else - # Is the header compilable? -{ echo "$as_me:$LINENO: checking erl_driver.h usability" >&5 -echo $ECHO_N "checking erl_driver.h usability... $ECHO_C" >&6; } -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. */ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF -/* end confdefs.h. */ -$ac_includes_default -#include -_ACEOF -rm -f conftest.$ac_objext -if { (ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_compile") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && { - test -z "$ac_cxx_werror_flag" || - test ! -s conftest.err - } && test -s conftest.$ac_objext; then - ac_header_compiler=yes + +ac_fn_cxx_check_header_mongrel "$LINENO" "erl_driver.h" "ac_cv_header_erl_driver_h" "$ac_includes_default" +if test "x$ac_cv_header_erl_driver_h" = xyes; then : + else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - ac_header_compiler=no + as_fn_error $? "Could not find the \`erl_driver.h' header. + +Are the Erlang headers installed? + +Use the \`--with-erlang' option to specify the Erlang include directory." "$LINENO" 5 + fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -{ echo "$as_me:$LINENO: result: $ac_header_compiler" >&5 -echo "${ECHO_T}$ac_header_compiler" >&6; } -# Is the header present? -{ echo "$as_me:$LINENO: checking erl_driver.h presence" >&5 -echo $ECHO_N "checking erl_driver.h presence... $ECHO_C" >&6; } -cat >conftest.$ac_ext <<_ACEOF -/* confdefs.h. 
*/ -_ACEOF -cat confdefs.h >>conftest.$ac_ext -cat >>conftest.$ac_ext <<_ACEOF -/* end confdefs.h. */ -#include -_ACEOF -if { (ac_try="$ac_cpp conftest.$ac_ext" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval "echo \"\$as_me:$LINENO: $ac_try_echo\"") >&5 - (eval "$ac_cpp conftest.$ac_ext") 2>conftest.er1 - ac_status=$? - grep -v '^ *+' conftest.er1 >conftest.err - rm -f conftest.er1 - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } >/dev/null && { - test -z "$ac_cxx_preproc_warn_flag$ac_cxx_werror_flag" || - test ! -s conftest.err - }; then - ac_header_preproc=yes -else - echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - ac_header_preproc=no +CPPFLAGS="$OLD_CPPFLAGS" + +use_init=yes +use_launchd=yes +native_mochijson_enabled=no +tests_enabled=yes +docs_enabled=yes +strictness_enabled=no + +# Check whether --enable-init was given. +if test "${enable_init+set}" = set; then : + enableval=$enable_init; + use_init=$enableval + fi -rm -f conftest.err conftest.$ac_ext -{ echo "$as_me:$LINENO: result: $ac_header_preproc" >&5 -echo "${ECHO_T}$ac_header_preproc" >&6; } -# So? What about this header? -case $ac_header_compiler:$ac_header_preproc:$ac_cxx_preproc_warn_flag in - yes:no: ) - { echo "$as_me:$LINENO: WARNING: erl_driver.h: accepted by the compiler, rejected by the preprocessor!" >&5 -echo "$as_me: WARNING: erl_driver.h: accepted by the compiler, rejected by the preprocessor!" >&2;} - { echo "$as_me:$LINENO: WARNING: erl_driver.h: proceeding with the compiler's result" >&5 -echo "$as_me: WARNING: erl_driver.h: proceeding with the compiler's result" >&2;} - ac_header_preproc=yes - ;; - no:yes:* ) - { echo "$as_me:$LINENO: WARNING: erl_driver.h: present but cannot be compiled" >&5 -echo "$as_me: WARNING: erl_driver.h: present but cannot be compiled" >&2;} - { echo "$as_me:$LINENO: WARNING: erl_driver.h: check for missing prerequisite headers?" 
>&5 -echo "$as_me: WARNING: erl_driver.h: check for missing prerequisite headers?" >&2;} - { echo "$as_me:$LINENO: WARNING: erl_driver.h: see the Autoconf documentation" >&5 -echo "$as_me: WARNING: erl_driver.h: see the Autoconf documentation" >&2;} - { echo "$as_me:$LINENO: WARNING: erl_driver.h: section \"Present But Cannot Be Compiled\"" >&5 -echo "$as_me: WARNING: erl_driver.h: section \"Present But Cannot Be Compiled\"" >&2;} - { echo "$as_me:$LINENO: WARNING: erl_driver.h: proceeding with the preprocessor's result" >&5 -echo "$as_me: WARNING: erl_driver.h: proceeding with the preprocessor's result" >&2;} - { echo "$as_me:$LINENO: WARNING: erl_driver.h: in the future, the compiler will take precedence" >&5 -echo "$as_me: WARNING: erl_driver.h: in the future, the compiler will take precedence" >&2;} - ( cat <<\_ASBOX -## ------------------------------------------------------------ ## -## Report this to https://issues.apache.org/jira/browse/COUCHDB ## -## ------------------------------------------------------------ ## -_ASBOX - ) | sed "s/^/$as_me: WARNING: /" >&2 - ;; -esac -{ echo "$as_me:$LINENO: checking for erl_driver.h" >&5 -echo $ECHO_N "checking for erl_driver.h... $ECHO_C" >&6; } -if test "${ac_cv_header_erl_driver_h+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 -else - ac_cv_header_erl_driver_h=$ac_header_preproc +# Check whether --enable-launchd was given. +if test "${enable_launchd+set}" = set; then : + enableval=$enable_launchd; + use_launchd=$enableval + fi -{ echo "$as_me:$LINENO: result: $ac_cv_header_erl_driver_h" >&5 -echo "${ECHO_T}$ac_cv_header_erl_driver_h" >&6; } + + +# Check whether --enable-native-mochijson was given. +if test "${enable_native_mochijson+set}" = set; then : + enableval=$enable_native_mochijson; + native_mochijson_enabled=$enableval fi -if test $ac_cv_header_erl_driver_h = yes; then - : -else - { { echo "$as_me:$LINENO: error: Could not find the \`erl_driver.h' header. -Are the Erlang headers installed? 
Use the \`--with-erlang' option to specify the -path to the Erlang include directory." >&5 -echo "$as_me: error: Could not find the \`erl_driver.h' header. - -Are the Erlang headers installed? Use the \`--with-erlang' option to specify the -path to the Erlang include directory." >&2;} - { (exit 1); exit 1; }; } +# Check whether --enable-tests was given. +if test "${enable_tests+set}" = set; then : + enableval=$enable_tests; + tests_enabled=$enableval + fi -CPPFLAGS="$OLD_CPPFLAGS" +# Check whether --enable-docs was given. +if test "${enable_docs+set}" = set; then : + enableval=$enable_docs; + docs_enabled=$enableval + +fi + + +# Check whether --enable-strictness was given. +if test "${enable_strictness+set}" = set; then : + enableval=$enable_strictness; + strictness_enabled=$enableval + +fi + + +init_enabled=false +launchd_enabled=false + +if test "$use_init" = "yes"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: checking location of init directory" >&5 +$as_echo_n "checking location of init directory... " >&6; } + if test -d /etc/rc.d; then + init_enabled=true + initdir='${sysconfdir}/rc.d' + + { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${initdir}" >&5 +$as_echo "${initdir}" >&6; } + else + if test -d /etc/init.d; then + init_enabled=true + initdir='${sysconfdir}/init.d' + + { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${initdir}" >&5 +$as_echo "${initdir}" >&6; } + else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: not found" >&5 +$as_echo "not found" >&6; } + fi + fi +fi + +if test "$use_launchd" = "yes"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: checking location of launchd directory" >&5 +$as_echo_n "checking location of launchd directory... 
" >&6; } + if test -d /Library/LaunchDaemons; then + init_enabled=false + launchd_enabled=true + launchddir='${prefix}/Library/LaunchDaemons' + + { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${launchddir}" >&5 +$as_echo "${launchddir}" >&6; } + else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: not found" >&5 +$as_echo "not found" >&6; } + fi +fi # Extract the first word of "help2man", so it can be a program name with args. set dummy help2man; ac_word=$2 -{ echo "$as_me:$LINENO: checking for $ac_word" >&5 -echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6; } -if test "${ac_cv_path_HELP2MAN_EXECUTABLE+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_HAS_HELP2MAN+:} false; then : + $as_echo_n "(cached) " >&6 else - case $HELP2MAN_EXECUTABLE in - [\\/]* | ?:[\\/]*) - ac_cv_path_HELP2MAN_EXECUTABLE="$HELP2MAN_EXECUTABLE" # Let the user override the test with a path. - ;; - *) - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR + if test -n "$HAS_HELP2MAN"; then + ac_cv_prog_HAS_HELP2MAN="$HAS_HELP2MAN" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_path_HELP2MAN_EXECUTABLE="$as_dir/$ac_word$ac_exec_ext" - echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5 + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_HAS_HELP2MAN="yes" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done -done + done IFS=$as_save_IFS - ;; -esac fi -HELP2MAN_EXECUTABLE=$ac_cv_path_HELP2MAN_EXECUTABLE -if test -n "$HELP2MAN_EXECUTABLE"; then - { echo "$as_me:$LINENO: result: $HELP2MAN_EXECUTABLE" >&5 -echo "${ECHO_T}$HELP2MAN_EXECUTABLE" >&6; } +fi +HAS_HELP2MAN=$ac_cv_prog_HAS_HELP2MAN +if test -n "$HAS_HELP2MAN"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $HAS_HELP2MAN" >&5 +$as_echo "$HAS_HELP2MAN" >&6; } else - { echo "$as_me:$LINENO: result: no" >&5 -echo "${ECHO_T}no" >&6; } + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi -if test x${HELP2MAN_EXECUTABLE} = x; then - { echo "$as_me:$LINENO: WARNING: You will be unable to regenerate any man pages." >&5 -echo "$as_me: WARNING: You will be unable to regenerate any man pages." >&2;} + +if test x${HAS_HELP2MAN} = x; then + if test x${strictness_enabled} = xyes; then + as_fn_error $? "Could not find the \`help2man' executable." "$LINENO" 5 + else + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: You will be unable to regenerate man pages." >&5 +$as_echo "$as_me: WARNING: You will be unable to regenerate man pages." >&2;} + fi fi -use_init=yes -use_launchd=yes -native_mochijson_enabled=no +# Extract the first word of "pdflatex", so it can be a program name with args. +set dummy pdflatex; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... 
" >&6; } +if ${ac_cv_prog_HAS_PDFLATEX+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$HAS_PDFLATEX"; then + ac_cv_prog_HAS_PDFLATEX="$HAS_PDFLATEX" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_HAS_PDFLATEX="yes" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS -# Check whether --enable-init was given. -if test "${enable_init+set}" = set; then - enableval=$enable_init; - use_init=$enableval +fi +fi +HAS_PDFLATEX=$ac_cv_prog_HAS_PDFLATEX +if test -n "$HAS_PDFLATEX"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $HAS_PDFLATEX" >&5 +$as_echo "$HAS_PDFLATEX" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi + + + +if test x${HAS_PDFLATEX} = x; then + if test x${strictness_enabled} = xyes; then + as_fn_error $? "Could not find the \`pdflatex' executable. + +Is LaTeX installed?" "$LINENO" 5 + else + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: You will be unable to regenerate PDF documentation." >&5 +$as_echo "$as_me: WARNING: You will be unable to regenerate PDF documentation." >&2;} + fi +fi + +# Extract the first word of "makeinfo", so it can be a program name with args. +set dummy makeinfo; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_HAS_MAKEINFO+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$HAS_MAKEINFO"; then + ac_cv_prog_HAS_MAKEINFO="$HAS_MAKEINFO" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_HAS_MAKEINFO="yes" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS fi +fi +HAS_MAKEINFO=$ac_cv_prog_HAS_MAKEINFO +if test -n "$HAS_MAKEINFO"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $HAS_MAKEINFO" >&5 +$as_echo "$HAS_MAKEINFO" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi -# Check whether --enable-launchd was given. -if test "${enable_launchd+set}" = set; then - enableval=$enable_launchd; - use_launchd=$enableval +if test x${HAS_MAKEINFO} = x; then + if test x${strictness_enabled} = xyes; then + as_fn_error $? "Could not find the \`makeinfo' executable. + +Is GNU Texinfo installed?" "$LINENO" 5 + else + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: You will be unable to regenerate info documentation." >&5 +$as_echo "$as_me: WARNING: You will be unable to regenerate info documentation." >&2;} + fi +fi + +# Extract the first word of "install-info", so it can be a program name with args. +set dummy install-info; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_HAS_INSTALLINFO+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$HAS_INSTALLINFO"; then + ac_cv_prog_HAS_INSTALLINFO="$HAS_INSTALLINFO" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_HAS_INSTALLINFO="yes" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +HAS_INSTALLINFO=$ac_cv_prog_HAS_INSTALLINFO +if test -n "$HAS_INSTALLINFO"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $HAS_INSTALLINFO" >&5 +$as_echo "$HAS_INSTALLINFO" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } fi -# Check whether --enable-native-mochijson was given. -if test "${enable_native_mochijson+set}" = set; then - enableval=$enable_native_mochijson; - native_mochijson_enabled=$enableval +if test x${HAS_INSTALLINFO} = x; then + if test x${strictness_enabled} = xyes; then + as_fn_error $? "Could not find the \`install-info' executable. + +Is GNU Texinfo installed?" "$LINENO" 5 + else + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: You will be unable to install info documentation." >&5 +$as_echo "$as_me: WARNING: You will be unable to install info documentation." >&2;} + fi fi +# Extract the first word of "sphinx-build", so it can be a program name with args. +set dummy sphinx-build; ac_word=$2 +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 +$as_echo_n "checking for $ac_word... " >&6; } +if ${ac_cv_prog_HAS_SPHINX_BUILD+:} false; then : + $as_echo_n "(cached) " >&6 +else + if test -n "$HAS_SPHINX_BUILD"; then + ac_cv_prog_HAS_SPHINX_BUILD="$HAS_SPHINX_BUILD" # Let the user override the test. +else +as_save_IFS=$IFS; IFS=$PATH_SEPARATOR +for as_dir in $PATH +do + IFS=$as_save_IFS + test -z "$as_dir" && as_dir=. 
+ for ac_exec_ext in '' $ac_executable_extensions; do + if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then + ac_cv_prog_HAS_SPHINX_BUILD="yes" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +done + done +IFS=$as_save_IFS + +fi +fi +HAS_SPHINX_BUILD=$ac_cv_prog_HAS_SPHINX_BUILD +if test -n "$HAS_SPHINX_BUILD"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $HAS_SPHINX_BUILD" >&5 +$as_echo "$HAS_SPHINX_BUILD" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } +fi -init_enabled=false -launchd_enabled=false -if test "$use_init" = "yes"; then - { echo "$as_me:$LINENO: checking location of init directory" >&5 -echo $ECHO_N "checking location of init directory... $ECHO_C" >&6; } - if test -d /etc/rc.d; then - init_enabled=true - initdir='${sysconfdir}/rc.d' - { echo "$as_me:$LINENO: result: ${initdir}" >&5 -echo "${ECHO_T}${initdir}" >&6; } - else - if test -d /etc/init.d; then - init_enabled=true - initdir='${sysconfdir}/init.d' +if test x${HAS_SPHINX_BUILD} = x; then + if test x${strictness_enabled} = xyes; then + as_fn_error $? "Could not find the \`sphinx-build' executable." "$LINENO" 5 + else + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: You will be unable to regenerate documentation." >&5 +$as_echo "$as_me: WARNING: You will be unable to regenerate documentation." 
>&2;} + fi +fi - { echo "$as_me:$LINENO: result: ${initdir}" >&5 -echo "${ECHO_T}${initdir}" >&6; } - else - { echo "$as_me:$LINENO: result: not found" >&5 -echo "${ECHO_T}not found" >&6; } - fi +if test x${docs_enabled} = xyes; then + if test x${HAS_HELP2MAN} = xyes; then + build_man=yes + fi + if test x${HAS_SPHINX_BUILD} = xyes; then + build_html=yes + if test x${HAS_MAKEINFO} = xyes; then + build_info=yes + fi + if test x${HAS_PDFLATEX} = xyes; then + build_pdf=yes + fi fi fi -if test "$use_launchd" = "yes"; then - { echo "$as_me:$LINENO: checking location of launchd directory" >&5 -echo $ECHO_N "checking location of launchd directory... $ECHO_C" >&6; } - if test -d /Library/LaunchDaemons; then - init_enabled=false - launchd_enabled=true - launchddir='${prefix}/Library/LaunchDaemons' +if test x${strictness_enabled} = xyes; then - { echo "$as_me:$LINENO: result: ${launchddir}" >&5 -echo "${ECHO_T}${launchddir}" >&6; } - else - { echo "$as_me:$LINENO: result: not found" >&5 -echo "${ECHO_T}not found" >&6; } + if test -z $PYTHON; + then + PYTHON="python" fi -fi + PYTHON_NAME=`basename $PYTHON` + { $as_echo "$as_me:${as_lineno-$LINENO}: checking $PYTHON_NAME module: pygments" >&5 +$as_echo_n "checking $PYTHON_NAME module: pygments... " >&6; } + $PYTHON -c "import pygments" 2>/dev/null + if test $? -eq 0; + then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + eval HAVE_PYMOD_PYGMENTS=yes + else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + eval HAVE_PYMOD_PYGMENTS=no + # + if test -n "fatal" + then + as_fn_error $? "failed to find required module pygments" "$LINENO" 5 + exit 1 + fi + fi +else + if test -z $PYTHON; + then + PYTHON="python" + fi + PYTHON_NAME=`basename $PYTHON` + { $as_echo "$as_me:${as_lineno-$LINENO}: checking $PYTHON_NAME module: pygments" >&5 +$as_echo_n "checking $PYTHON_NAME module: pygments... " >&6; } + $PYTHON -c "import pygments" 2>/dev/null + if test $? 
-eq 0; + then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } + eval HAVE_PYMOD_PYGMENTS=yes + else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + eval HAVE_PYMOD_PYGMENTS=no + # + if test -n "" + then + as_fn_error $? "failed to find required module pygments" "$LINENO" 5 + exit 1 + fi + fi +fi +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking pygments version >= 1.5" >&5 +$as_echo_n "checking pygments version >= 1.5... " >&6; } -if test -n "$HELP2MAN_EXECUTABLE"; then - help2man_enabled=true -else - if test -f "$srcdir/bin/couchdb.1" -a -f "$srcdir/src/couchdb/priv/couchjs.1"; then - help2man_enabled=true +python 2> /dev/null << EOF +import sys +import pygments + +if float(pygments.__version__) >= 1.5: + sys.exit(0) +else: + sys.exit(1) +EOF + +if test $? -eq 0; then + { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 +$as_echo "yes" >&6; } +else + { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 +$as_echo "no" >&6; } + if test x${strictness_enabled} = xyes; then + as_fn_error 1 "Your copy of pygments is out of date." "$LINENO" 5 else - help2man_enabled=false + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Syntax highlighting may not work." >&5 +$as_echo "$as_me: WARNING: Syntax highlighting may not work." 
>&2;} fi fi + + + if test x${init_enabled} = xtrue; then INIT_TRUE= INIT_FALSE='#' @@ -18603,14 +18955,6 @@ LAUNCHD_FALSE= fi - if test x${help2man_enabled} = xtrue; then - HELP2MAN_TRUE= - HELP2MAN_FALSE='#' -else - HELP2MAN_TRUE='#' - HELP2MAN_FALSE= -fi - if test x${native_mochijson_enabled} = xyes; then USE_NATIVE_MOCHIJSON_TRUE= USE_NATIVE_MOCHIJSON_FALSE='#' @@ -18628,6 +18972,56 @@ fi + if test x${build_man} = xyes; then + BUILD_MAN_TRUE= + BUILD_MAN_FALSE='#' +else + BUILD_MAN_TRUE='#' + BUILD_MAN_FALSE= +fi + + if test x${build_info} = xyes; then + BUILD_INFO_TRUE= + BUILD_INFO_FALSE='#' +else + BUILD_INFO_TRUE='#' + BUILD_INFO_FALSE= +fi + + if test x${build_pdf} = xyes; then + BUILD_PDF_TRUE= + BUILD_PDF_FALSE='#' +else + BUILD_PDF_TRUE='#' + BUILD_PDF_FALSE= +fi + + if test x${build_html} = xyes; then + BUILD_HTML_TRUE= + BUILD_HTML_FALSE='#' +else + BUILD_HTML_TRUE='#' + BUILD_HTML_FALSE= +fi + + + if test x${tests_enabled} = xyes; then + TESTS_TRUE= + TESTS_FALSE='#' +else + TESTS_TRUE='#' + TESTS_FALSE= +fi + + if test x${strictness_enabled} = xyes; then + STRICTNESS_TRUE= + STRICTNESS_FALSE='#' +else + STRICTNESS_TRUE='#' + STRICTNESS_FALSE= +fi + + package_author_name="The Apache Software Foundation" package_author_address="dev@couchdb.apache.org" @@ -18639,11 +19033,11 @@ package_name="Apache CouchDB" -version="1.2.0" +version="1.4.0" version_major="1" -version_minor="2" +version_minor="4" version_revision="0" @@ -18723,8 +19117,20 @@ ac_config_files="$ac_config_files share/Makefile" +ac_config_files="$ac_config_files share/doc/Makefile" + +ac_config_files="$ac_config_files share/doc/build/Makefile" + ac_config_files="$ac_config_files src/Makefile" +ac_config_files="$ac_config_files src/couch_dbupdates/Makefile" + +ac_config_files="$ac_config_files src/couch_index/Makefile" + +ac_config_files="$ac_config_files src/couch_mrview/Makefile" + +ac_config_files="$ac_config_files src/couch_replicator/Makefile" + ac_config_files="$ac_config_files 
src/couchdb/couch.app.tpl" ac_config_files="$ac_config_files src/couchdb/Makefile" @@ -18789,12 +19195,13 @@ case $ac_val in #( *${as_nl}*) case $ac_var in #( - *_cv_*) { echo "$as_me:$LINENO: WARNING: Cache variable $ac_var contains a newline." >&5 -echo "$as_me: WARNING: Cache variable $ac_var contains a newline." >&2;} ;; + *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 +$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; esac case $ac_var in #( _ | IFS | as_nl) ;; #( - *) $as_unset $ac_var ;; + BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( + *) { eval $ac_var=; unset $ac_var;} ;; esac ;; esac done @@ -18802,8 +19209,8 @@ (set) 2>&1 | case $as_nl`(ac_space=' '; set) 2>&1` in #( *${as_nl}ac_space=\ *) - # `set' does not quote correctly, so add quotes (double-quote - # substitution turns \\\\ into \\, and sed turns \\ into \). + # `set' does not quote correctly, so add quotes: double-quote + # substitution turns \\\\ into \\, and sed turns \\ into \. sed -n \ "s/'/'\\\\''/g; s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p" @@ -18825,13 +19232,24 @@ :end' >>confcache if diff "$cache_file" confcache >/dev/null 2>&1; then :; else if test -w "$cache_file"; then - test "x$cache_file" != "x/dev/null" && - { echo "$as_me:$LINENO: updating cache $cache_file" >&5 -echo "$as_me: updating cache $cache_file" >&6;} - cat confcache >$cache_file + if test "x$cache_file" != "x/dev/null"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5 +$as_echo "$as_me: updating cache $cache_file" >&6;} + if test ! 
-f "$cache_file" || test -h "$cache_file"; then + cat confcache >"$cache_file" + else + case $cache_file in #( + */* | ?:*) + mv -f confcache "$cache_file"$$ && + mv -f "$cache_file"$$ "$cache_file" ;; #( + *) + mv -f confcache "$cache_file" ;; + esac + fi + fi else - { echo "$as_me:$LINENO: not updating unwritable cache $cache_file" >&5 -echo "$as_me: not updating unwritable cache $cache_file" >&6;} + { $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5 +$as_echo "$as_me: not updating unwritable cache $cache_file" >&6;} fi fi rm -f confcache @@ -18844,104 +19262,107 @@ ac_libobjs= ac_ltlibobjs= +U= for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue # 1. Remove the extension, and $U if already installed. ac_script='s/\$U\././;s/\.o$//;s/\.obj$//' - ac_i=`echo "$ac_i" | sed "$ac_script"` + ac_i=`$as_echo "$ac_i" | sed "$ac_script"` # 2. Prepend LIBOBJDIR. When used with automake>=1.10 LIBOBJDIR # will be set to the directory where LIBOBJS objects are built. - ac_libobjs="$ac_libobjs \${LIBOBJDIR}$ac_i\$U.$ac_objext" - ac_ltlibobjs="$ac_ltlibobjs \${LIBOBJDIR}$ac_i"'$U.lo' + as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext" + as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo' done LIBOBJS=$ac_libobjs LTLIBOBJS=$ac_ltlibobjs + if test -n "$EXEEXT"; then + am__EXEEXT_TRUE= + am__EXEEXT_FALSE='#' +else + am__EXEEXT_TRUE='#' + am__EXEEXT_FALSE= +fi + if test -z "${AMDEP_TRUE}" && test -z "${AMDEP_FALSE}"; then - { { echo "$as_me:$LINENO: error: conditional \"AMDEP\" was never defined. -Usually this means the macro was only invoked conditionally." >&5 -echo "$as_me: error: conditional \"AMDEP\" was never defined. -Usually this means the macro was only invoked conditionally." >&2;} - { (exit 1); exit 1; }; } + as_fn_error $? "conditional \"AMDEP\" was never defined. +Usually this means the macro was only invoked conditionally." 
"$LINENO" 5 fi if test -z "${am__fastdepCC_TRUE}" && test -z "${am__fastdepCC_FALSE}"; then - { { echo "$as_me:$LINENO: error: conditional \"am__fastdepCC\" was never defined. -Usually this means the macro was only invoked conditionally." >&5 -echo "$as_me: error: conditional \"am__fastdepCC\" was never defined. -Usually this means the macro was only invoked conditionally." >&2;} - { (exit 1); exit 1; }; } + as_fn_error $? "conditional \"am__fastdepCC\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 +fi +if test -z "${am__fastdepCC_TRUE}" && test -z "${am__fastdepCC_FALSE}"; then + as_fn_error $? "conditional \"am__fastdepCC\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${am__fastdepCXX_TRUE}" && test -z "${am__fastdepCXX_FALSE}"; then - { { echo "$as_me:$LINENO: error: conditional \"am__fastdepCXX\" was never defined. -Usually this means the macro was only invoked conditionally." >&5 -echo "$as_me: error: conditional \"am__fastdepCXX\" was never defined. -Usually this means the macro was only invoked conditionally." >&2;} - { (exit 1); exit 1; }; } + as_fn_error $? "conditional \"am__fastdepCXX\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi + if test -z "${WINDOWS_TRUE}" && test -z "${WINDOWS_FALSE}"; then - { { echo "$as_me:$LINENO: error: conditional \"WINDOWS\" was never defined. -Usually this means the macro was only invoked conditionally." >&5 -echo "$as_me: error: conditional \"WINDOWS\" was never defined. -Usually this means the macro was only invoked conditionally." >&2;} - { (exit 1); exit 1; }; } + as_fn_error $? "conditional \"WINDOWS\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${USE_OTP_NIFS_TRUE}" && test -z "${USE_OTP_NIFS_FALSE}"; then - { { echo "$as_me:$LINENO: error: conditional \"USE_OTP_NIFS\" was never defined. 
-Usually this means the macro was only invoked conditionally." >&5 -echo "$as_me: error: conditional \"USE_OTP_NIFS\" was never defined. -Usually this means the macro was only invoked conditionally." >&2;} - { (exit 1); exit 1; }; } + as_fn_error $? "conditional \"USE_OTP_NIFS\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${USE_EJSON_COMPARE_NIF_TRUE}" && test -z "${USE_EJSON_COMPARE_NIF_FALSE}"; then - { { echo "$as_me:$LINENO: error: conditional \"USE_EJSON_COMPARE_NIF\" was never defined. -Usually this means the macro was only invoked conditionally." >&5 -echo "$as_me: error: conditional \"USE_EJSON_COMPARE_NIF\" was never defined. -Usually this means the macro was only invoked conditionally." >&2;} - { (exit 1); exit 1; }; } + as_fn_error $? "conditional \"USE_EJSON_COMPARE_NIF\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${INIT_TRUE}" && test -z "${INIT_FALSE}"; then - { { echo "$as_me:$LINENO: error: conditional \"INIT\" was never defined. -Usually this means the macro was only invoked conditionally." >&5 -echo "$as_me: error: conditional \"INIT\" was never defined. -Usually this means the macro was only invoked conditionally." >&2;} - { (exit 1); exit 1; }; } + as_fn_error $? "conditional \"INIT\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${LAUNCHD_TRUE}" && test -z "${LAUNCHD_FALSE}"; then - { { echo "$as_me:$LINENO: error: conditional \"LAUNCHD\" was never defined. -Usually this means the macro was only invoked conditionally." >&5 -echo "$as_me: error: conditional \"LAUNCHD\" was never defined. -Usually this means the macro was only invoked conditionally." >&2;} - { (exit 1); exit 1; }; } -fi -if test -z "${HELP2MAN_TRUE}" && test -z "${HELP2MAN_FALSE}"; then - { { echo "$as_me:$LINENO: error: conditional \"HELP2MAN\" was never defined. 
-Usually this means the macro was only invoked conditionally." >&5 -echo "$as_me: error: conditional \"HELP2MAN\" was never defined. -Usually this means the macro was only invoked conditionally." >&2;} - { (exit 1); exit 1; }; } + as_fn_error $? "conditional \"LAUNCHD\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${USE_NATIVE_MOCHIJSON_TRUE}" && test -z "${USE_NATIVE_MOCHIJSON_FALSE}"; then - { { echo "$as_me:$LINENO: error: conditional \"USE_NATIVE_MOCHIJSON\" was never defined. -Usually this means the macro was only invoked conditionally." >&5 -echo "$as_me: error: conditional \"USE_NATIVE_MOCHIJSON\" was never defined. -Usually this means the macro was only invoked conditionally." >&2;} - { (exit 1); exit 1; }; } + as_fn_error $? "conditional \"USE_NATIVE_MOCHIJSON\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${USE_CURL_TRUE}" && test -z "${USE_CURL_FALSE}"; then - { { echo "$as_me:$LINENO: error: conditional \"USE_CURL\" was never defined. -Usually this means the macro was only invoked conditionally." >&5 -echo "$as_me: error: conditional \"USE_CURL\" was never defined. -Usually this means the macro was only invoked conditionally." >&2;} - { (exit 1); exit 1; }; } + as_fn_error $? "conditional \"USE_CURL\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 +fi +if test -z "${BUILD_MAN_TRUE}" && test -z "${BUILD_MAN_FALSE}"; then + as_fn_error $? "conditional \"BUILD_MAN\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 +fi +if test -z "${BUILD_INFO_TRUE}" && test -z "${BUILD_INFO_FALSE}"; then + as_fn_error $? "conditional \"BUILD_INFO\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 +fi +if test -z "${BUILD_PDF_TRUE}" && test -z "${BUILD_PDF_FALSE}"; then + as_fn_error $? 
"conditional \"BUILD_PDF\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 +fi +if test -z "${BUILD_HTML_TRUE}" && test -z "${BUILD_HTML_FALSE}"; then + as_fn_error $? "conditional \"BUILD_HTML\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 +fi +if test -z "${TESTS_TRUE}" && test -z "${TESTS_FALSE}"; then + as_fn_error $? "conditional \"TESTS\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 +fi +if test -z "${STRICTNESS_TRUE}" && test -z "${STRICTNESS_FALSE}"; then + as_fn_error $? "conditional \"STRICTNESS\" was never defined. +Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi -: ${CONFIG_STATUS=./config.status} +: "${CONFIG_STATUS=./config.status}" +ac_write_fail=0 ac_clean_files_save=$ac_clean_files ac_clean_files="$ac_clean_files $CONFIG_STATUS" -{ echo "$as_me:$LINENO: creating $CONFIG_STATUS" >&5 -echo "$as_me: creating $CONFIG_STATUS" >&6;} -cat >$CONFIG_STATUS <<_ACEOF +{ $as_echo "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5 +$as_echo "$as_me: creating $CONFIG_STATUS" >&6;} +as_write_fail=0 +cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1 #! $SHELL # Generated by $as_me. # Run this file to recreate the current configuration. @@ -18951,59 +19372,79 @@ debug=false ac_cs_recheck=false ac_cs_silent=false -SHELL=\${CONFIG_SHELL-$SHELL} -_ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF -## --------------------- ## -## M4sh Initialization. ## -## --------------------- ## +SHELL=\${CONFIG_SHELL-$SHELL} +export SHELL +_ASEOF +cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1 +## -------------------- ## +## M4sh Initialization. 
## +## -------------------- ## # Be more Bourne compatible DUALCASE=1; export DUALCASE # for MKS sh -if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then +if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : emulate sh NULLCMD=: - # Zsh 3.x and 4.x performs word splitting on ${1+"$@"}, which + # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which # is contrary to our usage. Disable this feature. alias -g '${1+"$@"}'='"$@"' setopt NO_GLOB_SUBST else - case `(set -o) 2>/dev/null` in - *posix*) set -o posix ;; + case `(set -o) 2>/dev/null` in #( + *posix*) : + set -o posix ;; #( + *) : + ;; esac - fi - - -# PATH needs CR -# Avoid depending upon Character Ranges. -as_cr_letters='abcdefghijklmnopqrstuvwxyz' -as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' -as_cr_Letters=$as_cr_letters$as_cr_LETTERS -as_cr_digits='0123456789' -as_cr_alnum=$as_cr_Letters$as_cr_digits - -# The user is always right. -if test "${PATH_SEPARATOR+set}" != set; then - echo "#! /bin/sh" >conf$$.sh - echo "exit 0" >>conf$$.sh - chmod +x conf$$.sh - if (PATH="/nonexistent;."; conf$$.sh) >/dev/null 2>&1; then - PATH_SEPARATOR=';' +as_nl=' +' +export as_nl +# Printing a long string crashes Solaris 7 /usr/bin/printf. +as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' +as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo +as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo +# Prefer a ksh shell builtin over an external printf program on Solaris, +# but without wasting forks for bash or zsh. 
+if test -z "$BASH_VERSION$ZSH_VERSION" \ + && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then + as_echo='print -r --' + as_echo_n='print -rn --' +elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then + as_echo='printf %s\n' + as_echo_n='printf %s' +else + if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then + as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' + as_echo_n='/usr/ucb/echo -n' else - PATH_SEPARATOR=: + as_echo_body='eval expr "X$1" : "X\\(.*\\)"' + as_echo_n_body='eval + arg=$1; + case $arg in #( + *"$as_nl"*) + expr "X$arg" : "X\\(.*\\)$as_nl"; + arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; + esac; + expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" + ' + export as_echo_n_body + as_echo_n='sh -c $as_echo_n_body as_echo' fi - rm -f conf$$.sh + export as_echo_body + as_echo='sh -c $as_echo_body as_echo' fi -# Support unset when possible. -if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then - as_unset=unset -else - as_unset=false +# The user is always right. +if test "${PATH_SEPARATOR+set}" != set; then + PATH_SEPARATOR=: + (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { + (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || + PATH_SEPARATOR=';' + } fi @@ -19012,20 +19453,19 @@ # there to prevent editors from complaining about space-tab. # (If _AS_PATH_WALK were called with IFS unset, it would disable word # splitting by setting IFS to empty value.) -as_nl=' -' IFS=" "" $as_nl" # Find who we are. Look in the path if we contain no directory separator. -case $0 in +as_myself= +case $0 in #(( *[\\/]* ) as_myself=$0 ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. - test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break -done + test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break + done IFS=$as_save_IFS ;; @@ -19036,32 +19476,111 @@ as_myself=$0 fi if test ! 
-f "$as_myself"; then - echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 - { (exit 1); exit 1; } + $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 + exit 1 fi -# Work around bugs in pre-3.0 UWIN ksh. -for as_var in ENV MAIL MAILPATH -do ($as_unset $as_var) >/dev/null 2>&1 && $as_unset $as_var +# Unset variables that we do not need and which cause bugs (e.g. in +# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" +# suppresses any "Segmentation fault" message there. '((' could +# trigger a bug in pdksh 5.2.14. +for as_var in BASH_ENV ENV MAIL MAILPATH +do eval test x\${$as_var+set} = xset \ + && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : done PS1='$ ' PS2='> ' PS4='+ ' # NLS nuisances. -for as_var in \ - LANG LANGUAGE LC_ADDRESS LC_ALL LC_COLLATE LC_CTYPE LC_IDENTIFICATION \ - LC_MEASUREMENT LC_MESSAGES LC_MONETARY LC_NAME LC_NUMERIC LC_PAPER \ - LC_TELEPHONE LC_TIME -do - if (set +x; test -z "`(eval $as_var=C; export $as_var) 2>&1`"); then - eval $as_var=C; export $as_var - else - ($as_unset $as_var) >/dev/null 2>&1 && $as_unset $as_var - fi -done +LC_ALL=C +export LC_ALL +LANGUAGE=C +export LANGUAGE + +# CDPATH. +(unset CDPATH) >/dev/null 2>&1 && unset CDPATH + + +# as_fn_error STATUS ERROR [LINENO LOG_FD] +# ---------------------------------------- +# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are +# provided, also output the error to LOG_FD, referencing LINENO. Then exit the +# script with STATUS, using 1 if that was 0. +as_fn_error () +{ + as_status=$1; test $as_status -eq 0 && as_status=1 + if test "$4"; then + as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack + $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 + fi + $as_echo "$as_me: error: $2" >&2 + as_fn_exit $as_status +} # as_fn_error + + +# as_fn_set_status STATUS +# ----------------------- +# Set $? to STATUS, without forking. 
+as_fn_set_status () +{ + return $1 +} # as_fn_set_status + +# as_fn_exit STATUS +# ----------------- +# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. +as_fn_exit () +{ + set +e + as_fn_set_status $1 + exit $1 +} # as_fn_exit + +# as_fn_unset VAR +# --------------- +# Portably unset VAR. +as_fn_unset () +{ + { eval $1=; unset $1;} +} +as_unset=as_fn_unset +# as_fn_append VAR VALUE +# ---------------------- +# Append the text in VALUE to the end of the definition contained in VAR. Take +# advantage of any shell optimizations that allow amortized linear growth over +# repeated appends, instead of the typical quadratic growth present in naive +# implementations. +if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : + eval 'as_fn_append () + { + eval $1+=\$2 + }' +else + as_fn_append () + { + eval $1=\$$1\$2 + } +fi # as_fn_append + +# as_fn_arith ARG... +# ------------------ +# Perform arithmetic evaluation on the ARGs, and store the result in the +# global $as_val. Take advantage of shells that can avoid forks. The arguments +# must be portable across $(()) and expr. +if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : + eval 'as_fn_arith () + { + as_val=$(( $* )) + }' +else + as_fn_arith () + { + as_val=`expr "$@" || test $? -eq 1` + } +fi # as_fn_arith + -# Required to use basename. if expr a : '\(a\)' >/dev/null 2>&1 && test "X`expr 00001 : '.*\(...\)'`" = X001; then as_expr=expr @@ -19075,13 +19594,17 @@ as_basename=false fi +if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then + as_dirname=dirname +else + as_dirname=false +fi -# Name of the executable. as_me=`$as_basename -- "$0" || $as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ X"$0" : 'X\(//\)$' \| \ X"$0" : 'X\(/\)' \| . 2>/dev/null || -echo X/"$0" | +$as_echo X/"$0" | sed '/^.*\/\([^/][^/]*\)\/*$/{ s//\1/ q @@ -19096,131 +19619,118 @@ } s/.*/./; q'` -# CDPATH. 
-$as_unset CDPATH - - - - as_lineno_1=$LINENO - as_lineno_2=$LINENO - test "x$as_lineno_1" != "x$as_lineno_2" && - test "x`expr $as_lineno_1 + 1`" = "x$as_lineno_2" || { - - # Create $as_me.lineno as a copy of $as_myself, but with $LINENO - # uniformly replaced by the line number. The first 'sed' inserts a - # line-number line after each line using $LINENO; the second 'sed' - # does the real work. The second script uses 'N' to pair each - # line-number line with the line containing $LINENO, and appends - # trailing '-' during substitution so that $LINENO is not a special - # case at line end. - # (Raja R Harinath suggested sed '=', and Paul Eggert wrote the - # scripts with optimization help from Paolo Bonzini. Blame Lee - # E. McMahon (1931-1989) for sed's syntax. :-) - sed -n ' - p - /[$]LINENO/= - ' <$as_myself | - sed ' - s/[$]LINENO.*/&-/ - t lineno - b - :lineno - N - :loop - s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/ - t loop - s/-\n.*// - ' >$as_me.lineno && - chmod +x "$as_me.lineno" || - { echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2 - { (exit 1); exit 1; }; } - - # Don't try to exec as it changes $[0], causing all sort of problems - # (the dirname of $[0] is not the place where we might find the - # original and so on. Autoconf is especially sensitive to this). - . "./$as_me.lineno" - # Exit status is that of the last command. - exit -} - - -if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then - as_dirname=dirname -else - as_dirname=false -fi +# Avoid depending upon Character Ranges. +as_cr_letters='abcdefghijklmnopqrstuvwxyz' +as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' +as_cr_Letters=$as_cr_letters$as_cr_LETTERS +as_cr_digits='0123456789' +as_cr_alnum=$as_cr_Letters$as_cr_digits ECHO_C= ECHO_N= ECHO_T= -case `echo -n x` in +case `echo -n x` in #((((( -n*) - case `echo 'x\c'` in + case `echo 'xy\c'` in *c*) ECHO_T=' ';; # ECHO_T is single tab character. 
- *) ECHO_C='\c';; + xy) ECHO_C='\c';; + *) echo `echo ksh88 bug on AIX 6.1` > /dev/null + ECHO_T=' ';; esac;; *) ECHO_N='-n';; esac -if expr a : '\(a\)' >/dev/null 2>&1 && - test "X`expr 00001 : '.*\(...\)'`" = X001; then - as_expr=expr -else - as_expr=false -fi - rm -f conf$$ conf$$.exe conf$$.file if test -d conf$$.dir; then rm -f conf$$.dir/conf$$.file else rm -f conf$$.dir - mkdir conf$$.dir + mkdir conf$$.dir 2>/dev/null fi -echo >conf$$.file -if ln -s conf$$.file conf$$ 2>/dev/null; then - as_ln_s='ln -s' - # ... but there are two gotchas: - # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. - # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. - # In both cases, we have to default to `cp -p'. - ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || - as_ln_s='cp -p' -elif ln conf$$.file conf$$ 2>/dev/null; then - as_ln_s=ln +if (echo >conf$$.file) 2>/dev/null; then + if ln -s conf$$.file conf$$ 2>/dev/null; then + as_ln_s='ln -s' + # ... but there are two gotchas: + # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. + # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. + # In both cases, we have to default to `cp -pR'. + ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || + as_ln_s='cp -pR' + elif ln conf$$.file conf$$ 2>/dev/null; then + as_ln_s=ln + else + as_ln_s='cp -pR' + fi else - as_ln_s='cp -p' + as_ln_s='cp -pR' fi rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file rmdir conf$$.dir 2>/dev/null + +# as_fn_mkdir_p +# ------------- +# Create "$as_dir" as a directory, including parents if necessary. 
+as_fn_mkdir_p () +{ + + case $as_dir in #( + -*) as_dir=./$as_dir;; + esac + test -d "$as_dir" || eval $as_mkdir_p || { + as_dirs= + while :; do + case $as_dir in #( + *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( + *) as_qdir=$as_dir;; + esac + as_dirs="'$as_qdir' $as_dirs" + as_dir=`$as_dirname -- "$as_dir" || +$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$as_dir" : 'X\(//\)[^/]' \| \ + X"$as_dir" : 'X\(//\)$' \| \ + X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X"$as_dir" | + sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ + s//\1/ + q + } + /^X\(\/\/\)[^/].*/{ + s//\1/ + q + } + /^X\(\/\/\)$/{ + s//\1/ + q + } + /^X\(\/\).*/{ + s//\1/ + q + } + s/.*/./; q'` + test -d "$as_dir" && break + done + test -z "$as_dirs" || eval "mkdir $as_dirs" + } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" + + +} # as_fn_mkdir_p if mkdir -p . 2>/dev/null; then - as_mkdir_p=: + as_mkdir_p='mkdir -p "$as_dir"' else test -d ./-p && rmdir ./-p as_mkdir_p=false fi -if test -x / >/dev/null 2>&1; then - as_test_x='test -x' -else - if ls -dL / >/dev/null 2>&1; then - as_ls_L_option=L - else - as_ls_L_option= - fi - as_test_x=' - eval sh -c '\'' - if test -d "$1"; then - test -d "$1/."; - else - case $1 in - -*)set "./$1";; - esac; - case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in - ???[sx]*):;;*)false;;esac;fi - '\'' sh - ' -fi -as_executable_p=$as_test_x + +# as_fn_executable_p FILE +# ----------------------- +# Test if FILE is an executable regular file. +as_fn_executable_p () +{ + test -f "$1" && test -x "$1" +} # as_fn_executable_p +as_test_x='test -x' +as_executable_p=as_fn_executable_p # Sed expression to map a string onto a valid CPP name. as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" @@ -19230,13 +19740,19 @@ exec 6>&1 +## ----------------------------------- ## +## Main body of $CONFIG_STATUS script. 
## +## ----------------------------------- ## +_ASEOF +test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1 -# Save the log message, to keep $[0] and so on meaningful, and to +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +# Save the log message, to keep $0 and so on meaningful, and to # report actual input values of CONFIG_FILES etc. instead of their # values after options handling. ac_log=" -This file was extended by Apache CouchDB $as_me 1.2.0, which was -generated by GNU Autoconf 2.61. Invocation command line was +This file was extended by Apache CouchDB $as_me 1.4.0, which was +generated by GNU Autoconf 2.69. Invocation command line was CONFIG_FILES = $CONFIG_FILES CONFIG_HEADERS = $CONFIG_HEADERS @@ -19249,7 +19765,16 @@ _ACEOF -cat >>$CONFIG_STATUS <<_ACEOF +case $ac_config_files in *" +"*) set x $ac_config_files; shift; ac_config_files=$*;; +esac + +case $ac_config_headers in *" +"*) set x $ac_config_headers; shift; ac_config_headers=$*;; +esac + + +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 # Files that config.status was made for. config_files="$ac_config_files" config_headers="$ac_config_headers" @@ -19257,22 +19782,25 @@ _ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 ac_cs_usage="\ -\`$as_me' instantiates files from templates according to the -current configuration. +\`$as_me' instantiates files and other configuration actions +from templates according to the current configuration. Unless the files +and actions are specified as TAGs, all are instantiated by default. -Usage: $0 [OPTIONS] [FILE]... +Usage: $0 [OPTION]... [TAG]... 
-h, --help print this help, then exit -V, --version print version number and configuration settings, then exit - -q, --quiet do not print progress messages + --config print configuration, then exit + -q, --quiet, --silent + do not print progress messages -d, --debug don't remove temporary files --recheck update $as_me by reconfiguring in the same conditions - --file=FILE[:TEMPLATE] - instantiate the configuration file FILE - --header=FILE[:TEMPLATE] - instantiate the configuration header FILE + --file=FILE[:TEMPLATE] + instantiate the configuration file FILE + --header=FILE[:TEMPLATE] + instantiate the configuration header FILE Configuration files: $config_files @@ -19283,16 +19811,17 @@ Configuration commands: $config_commands -Report bugs to ." +Report bugs to ." _ACEOF -cat >>$CONFIG_STATUS <<_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`" ac_cs_version="\\ -Apache CouchDB config.status 1.2.0 -configured by $0, generated by GNU Autoconf 2.61, - with options \\"`echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`\\" +Apache CouchDB config.status 1.4.0 +configured by $0, generated by GNU Autoconf 2.69, + with options \\"\$ac_cs_config\\" -Copyright (C) 2006 Free Software Foundation, Inc. +Copyright (C) 2012 Free Software Foundation, Inc. This config.status script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it." @@ -19300,20 +19829,26 @@ srcdir='$srcdir' INSTALL='$INSTALL' MKDIR_P='$MKDIR_P' +AWK='$AWK' +test -n "\$AWK" || AWK=awk _ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF -# If no file are specified by the user, then we need to provide default -# value. By we need to know if files were specified by the user. +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +# The default lists apply if the user does not specify any file. 
ac_need_defaults=: while test $# != 0 do case $1 in - --*=*) + --*=?*) ac_option=`expr "X$1" : 'X\([^=]*\)='` ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'` ac_shift=: ;; + --*=) + ac_option=`expr "X$1" : 'X\([^=]*\)='` + ac_optarg= + ac_shift=: + ;; *) ac_option=$1 ac_optarg=$2 @@ -19326,34 +19861,41 @@ -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) ac_cs_recheck=: ;; --version | --versio | --versi | --vers | --ver | --ve | --v | -V ) - echo "$ac_cs_version"; exit ;; + $as_echo "$ac_cs_version"; exit ;; + --config | --confi | --conf | --con | --co | --c ) + $as_echo "$ac_cs_config"; exit ;; --debug | --debu | --deb | --de | --d | -d ) debug=: ;; --file | --fil | --fi | --f ) $ac_shift - CONFIG_FILES="$CONFIG_FILES $ac_optarg" + case $ac_optarg in + *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; + '') as_fn_error $? "missing file argument" ;; + esac + as_fn_append CONFIG_FILES " '$ac_optarg'" ac_need_defaults=false;; --header | --heade | --head | --hea ) $ac_shift - CONFIG_HEADERS="$CONFIG_HEADERS $ac_optarg" + case $ac_optarg in + *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; + esac + as_fn_append CONFIG_HEADERS " '$ac_optarg'" ac_need_defaults=false;; --he | --h) # Conflict between --help and --header - { echo "$as_me: error: ambiguous option: $1 -Try \`$0 --help' for more information." >&2 - { (exit 1); exit 1; }; };; + as_fn_error $? "ambiguous option: \`$1' +Try \`$0 --help' for more information.";; --help | --hel | -h ) - echo "$ac_cs_usage"; exit ;; + $as_echo "$ac_cs_usage"; exit ;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil | --si | --s) ac_cs_silent=: ;; # This is an error. - -*) { echo "$as_me: error: unrecognized option: $1 -Try \`$0 --help' for more information." >&2 - { (exit 1); exit 1; }; } ;; + -*) as_fn_error $? "unrecognized option: \`$1' +Try \`$0 --help' for more information." 
;; - *) ac_config_targets="$ac_config_targets $1" + *) as_fn_append ac_config_targets " $1" ac_need_defaults=false ;; esac @@ -19368,27 +19910,29 @@ fi _ACEOF -cat >>$CONFIG_STATUS <<_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 if \$ac_cs_recheck; then - echo "running CONFIG_SHELL=$SHELL $SHELL $0 "$ac_configure_args \$ac_configure_extra_args " --no-create --no-recursion" >&6 - CONFIG_SHELL=$SHELL + set X $SHELL '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion + shift + \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6 + CONFIG_SHELL='$SHELL' export CONFIG_SHELL - exec $SHELL "$0"$ac_configure_args \$ac_configure_extra_args --no-create --no-recursion + exec "\$@" fi _ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 exec 5>>config.log { echo sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX ## Running $as_me. ## _ASBOX - echo "$ac_log" + $as_echo "$ac_log" } >&5 _ACEOF -cat >>$CONFIG_STATUS <<_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 # # INIT-COMMANDS # @@ -19433,12 +19977,18 @@ lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`' lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`' lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`' +lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`' +lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`' reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`' reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`' deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`' file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`' +file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`' +want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`' +sharedlib_from_linklib_cmd='`$ECHO 
"$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`' AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`' AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`' +archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`' STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`' RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`' old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`' @@ -19453,14 +20003,17 @@ lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`' lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`' lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`' +nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`' +lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`' objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`' MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`' lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`' -lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`' +lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`' lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`' need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`' +MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`' DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`' NMEDIT='`$ECHO 
"$NMEDIT" | $SED "$delay_single_quote_subst"`' LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`' @@ -19493,12 +20046,12 @@ hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`' inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`' link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`' -fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`' always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`' export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`' exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`' include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`' prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`' +postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`' file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`' variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`' need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`' @@ -19537,8 +20090,8 @@ compiler_CXX='`$ECHO "$compiler_CXX" | $SED "$delay_single_quote_subst"`' GCC_CXX='`$ECHO "$GCC_CXX" | $SED "$delay_single_quote_subst"`' lt_prog_compiler_no_builtin_flag_CXX='`$ECHO "$lt_prog_compiler_no_builtin_flag_CXX" | $SED "$delay_single_quote_subst"`' -lt_prog_compiler_wl_CXX='`$ECHO "$lt_prog_compiler_wl_CXX" | $SED "$delay_single_quote_subst"`' lt_prog_compiler_pic_CXX='`$ECHO "$lt_prog_compiler_pic_CXX" | $SED "$delay_single_quote_subst"`' +lt_prog_compiler_wl_CXX='`$ECHO "$lt_prog_compiler_wl_CXX" | $SED "$delay_single_quote_subst"`' lt_prog_compiler_static_CXX='`$ECHO "$lt_prog_compiler_static_CXX" | $SED "$delay_single_quote_subst"`' lt_cv_prog_compiler_c_o_CXX='`$ECHO "$lt_cv_prog_compiler_c_o_CXX" | $SED "$delay_single_quote_subst"`' 
archive_cmds_need_lc_CXX='`$ECHO "$archive_cmds_need_lc_CXX" | $SED "$delay_single_quote_subst"`' @@ -19565,12 +20118,12 @@ hardcode_automatic_CXX='`$ECHO "$hardcode_automatic_CXX" | $SED "$delay_single_quote_subst"`' inherit_rpath_CXX='`$ECHO "$inherit_rpath_CXX" | $SED "$delay_single_quote_subst"`' link_all_deplibs_CXX='`$ECHO "$link_all_deplibs_CXX" | $SED "$delay_single_quote_subst"`' -fix_srcfile_path_CXX='`$ECHO "$fix_srcfile_path_CXX" | $SED "$delay_single_quote_subst"`' always_export_symbols_CXX='`$ECHO "$always_export_symbols_CXX" | $SED "$delay_single_quote_subst"`' export_symbols_cmds_CXX='`$ECHO "$export_symbols_cmds_CXX" | $SED "$delay_single_quote_subst"`' exclude_expsyms_CXX='`$ECHO "$exclude_expsyms_CXX" | $SED "$delay_single_quote_subst"`' include_expsyms_CXX='`$ECHO "$include_expsyms_CXX" | $SED "$delay_single_quote_subst"`' prelink_cmds_CXX='`$ECHO "$prelink_cmds_CXX" | $SED "$delay_single_quote_subst"`' +postlink_cmds_CXX='`$ECHO "$postlink_cmds_CXX" | $SED "$delay_single_quote_subst"`' file_list_spec_CXX='`$ECHO "$file_list_spec_CXX" | $SED "$delay_single_quote_subst"`' hardcode_action_CXX='`$ECHO "$hardcode_action_CXX" | $SED "$delay_single_quote_subst"`' compiler_lib_search_dirs_CXX='`$ECHO "$compiler_lib_search_dirs_CXX" | $SED "$delay_single_quote_subst"`' @@ -19610,8 +20163,12 @@ reload_flag \ deplibs_check_method \ file_magic_cmd \ +file_magic_glob \ +want_nocaseglob \ +sharedlib_from_linklib_cmd \ AR \ AR_FLAGS \ +archiver_list_spec \ STRIP \ RANLIB \ CC \ @@ -19621,12 +20178,14 @@ lt_cv_sys_global_symbol_to_cdecl \ lt_cv_sys_global_symbol_to_c_name_address \ lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \ +nm_file_list_spec \ lt_prog_compiler_no_builtin_flag \ -lt_prog_compiler_wl \ lt_prog_compiler_pic \ +lt_prog_compiler_wl \ lt_prog_compiler_static \ lt_cv_prog_compiler_c_o \ need_locks \ +MANIFEST_TOOL \ DSYMUTIL \ NMEDIT \ LIPO \ @@ -19642,7 +20201,6 @@ hardcode_libdir_flag_spec \ hardcode_libdir_flag_spec_ld \ 
hardcode_libdir_separator \ -fix_srcfile_path \ exclude_expsyms \ include_expsyms \ file_list_spec \ @@ -19664,8 +20222,8 @@ reload_flag_CXX \ compiler_CXX \ lt_prog_compiler_no_builtin_flag_CXX \ -lt_prog_compiler_wl_CXX \ lt_prog_compiler_pic_CXX \ +lt_prog_compiler_wl_CXX \ lt_prog_compiler_static_CXX \ lt_cv_prog_compiler_c_o_CXX \ export_dynamic_flag_spec_CXX \ @@ -19677,7 +20235,6 @@ hardcode_libdir_flag_spec_CXX \ hardcode_libdir_flag_spec_ld_CXX \ hardcode_libdir_separator_CXX \ -fix_srcfile_path_CXX \ exclude_expsyms_CXX \ include_expsyms_CXX \ file_list_spec_CXX \ @@ -19711,6 +20268,7 @@ module_expsym_cmds \ export_symbols_cmds \ prelink_cmds \ +postlink_cmds \ postinstall_cmds \ postuninstall_cmds \ finish_cmds \ @@ -19725,7 +20283,8 @@ module_cmds_CXX \ module_expsym_cmds_CXX \ export_symbols_cmds_CXX \ -prelink_cmds_CXX; do +prelink_cmds_CXX \ +postlink_cmds_CXX; do case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in *[\\\\\\\`\\"\\\$]*) eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\"" @@ -19760,7 +20319,7 @@ _ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # Handling of arguments. 
for ac_config_target in $ac_config_targets @@ -19787,7 +20346,13 @@ "etc/windows/Makefile") CONFIG_FILES="$CONFIG_FILES etc/windows/Makefile" ;; "etc/Makefile") CONFIG_FILES="$CONFIG_FILES etc/Makefile" ;; "share/Makefile") CONFIG_FILES="$CONFIG_FILES share/Makefile" ;; + "share/doc/Makefile") CONFIG_FILES="$CONFIG_FILES share/doc/Makefile" ;; + "share/doc/build/Makefile") CONFIG_FILES="$CONFIG_FILES share/doc/build/Makefile" ;; "src/Makefile") CONFIG_FILES="$CONFIG_FILES src/Makefile" ;; + "src/couch_dbupdates/Makefile") CONFIG_FILES="$CONFIG_FILES src/couch_dbupdates/Makefile" ;; + "src/couch_index/Makefile") CONFIG_FILES="$CONFIG_FILES src/couch_index/Makefile" ;; + "src/couch_mrview/Makefile") CONFIG_FILES="$CONFIG_FILES src/couch_mrview/Makefile" ;; + "src/couch_replicator/Makefile") CONFIG_FILES="$CONFIG_FILES src/couch_replicator/Makefile" ;; "src/couchdb/couch.app.tpl") CONFIG_FILES="$CONFIG_FILES src/couchdb/couch.app.tpl" ;; "src/couchdb/Makefile") CONFIG_FILES="$CONFIG_FILES src/couchdb/Makefile" ;; "src/couchdb/priv/Makefile") CONFIG_FILES="$CONFIG_FILES src/couchdb/priv/Makefile" ;; @@ -19807,9 +20372,7 @@ "utils/Makefile") CONFIG_FILES="$CONFIG_FILES utils/Makefile" ;; "var/Makefile") CONFIG_FILES="$CONFIG_FILES var/Makefile" ;; - *) { { echo "$as_me:$LINENO: error: invalid argument: $ac_config_target" >&5 -echo "$as_me: error: invalid argument: $ac_config_target" >&2;} - { (exit 1); exit 1; }; };; + *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;; esac done @@ -19832,340 +20395,302 @@ # after its creation but before its name has been assigned to `$tmp'. $debug || { - tmp= + tmp= ac_tmp= trap 'exit_status=$? - { test -z "$tmp" || test ! -d "$tmp" || rm -fr "$tmp"; } && exit $exit_status + : "${ac_tmp:=$tmp}" + { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status ' 0 - trap '{ (exit 1); exit 1; }' 1 2 13 15 + trap 'as_fn_exit 1' 1 2 13 15 } # Create a (secure) tmp directory for tmp files. 
{ tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` && - test -n "$tmp" && test -d "$tmp" + test -d "$tmp" } || { tmp=./conf$$-$RANDOM - (umask 077 && mkdir "$tmp") -} || -{ - echo "$me: cannot create a temporary directory in ." >&2 - { (exit 1); exit 1; } -} - -# -# Set up the sed scripts for CONFIG_FILES section. -# + (umask 077 && mkdir "$tmp") +} || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5 +ac_tmp=$tmp -# No need to generate the scripts if there are no CONFIG_FILES. -# This happens for instance when ./config.status config.h +# Set up the scripts for CONFIG_FILES section. +# No need to generate them if there are no CONFIG_FILES. +# This happens for instance with `./config.status config.h'. if test -n "$CONFIG_FILES"; then -_ACEOF - - - -ac_delim='%!_!# ' -for ac_last_try in false false false false false :; do - cat >conf$$subs.sed <<_ACEOF -SHELL!$SHELL$ac_delim -PATH_SEPARATOR!$PATH_SEPARATOR$ac_delim -PACKAGE_NAME!$PACKAGE_NAME$ac_delim -PACKAGE_TARNAME!$PACKAGE_TARNAME$ac_delim -PACKAGE_VERSION!$PACKAGE_VERSION$ac_delim -PACKAGE_STRING!$PACKAGE_STRING$ac_delim -PACKAGE_BUGREPORT!$PACKAGE_BUGREPORT$ac_delim -exec_prefix!$exec_prefix$ac_delim -prefix!$prefix$ac_delim -program_transform_name!$program_transform_name$ac_delim -bindir!$bindir$ac_delim -sbindir!$sbindir$ac_delim -libexecdir!$libexecdir$ac_delim -datarootdir!$datarootdir$ac_delim -datadir!$datadir$ac_delim -sysconfdir!$sysconfdir$ac_delim -sharedstatedir!$sharedstatedir$ac_delim -localstatedir!$localstatedir$ac_delim -includedir!$includedir$ac_delim -oldincludedir!$oldincludedir$ac_delim -docdir!$docdir$ac_delim -infodir!$infodir$ac_delim -htmldir!$htmldir$ac_delim -dvidir!$dvidir$ac_delim -pdfdir!$pdfdir$ac_delim -psdir!$psdir$ac_delim -libdir!$libdir$ac_delim -localedir!$localedir$ac_delim -mandir!$mandir$ac_delim -DEFS!$DEFS$ac_delim -ECHO_C!$ECHO_C$ac_delim -ECHO_N!$ECHO_N$ac_delim -ECHO_T!$ECHO_T$ac_delim -LIBS!$LIBS$ac_delim 
-build_alias!$build_alias$ac_delim -host_alias!$host_alias$ac_delim -target_alias!$target_alias$ac_delim -INSTALL_PROGRAM!$INSTALL_PROGRAM$ac_delim -INSTALL_SCRIPT!$INSTALL_SCRIPT$ac_delim -INSTALL_DATA!$INSTALL_DATA$ac_delim -am__isrc!$am__isrc$ac_delim -CYGPATH_W!$CYGPATH_W$ac_delim -PACKAGE!$PACKAGE$ac_delim -VERSION!$VERSION$ac_delim -ACLOCAL!$ACLOCAL$ac_delim -AUTOCONF!$AUTOCONF$ac_delim -AUTOMAKE!$AUTOMAKE$ac_delim -AUTOHEADER!$AUTOHEADER$ac_delim -MAKEINFO!$MAKEINFO$ac_delim -install_sh!$install_sh$ac_delim -STRIP!$STRIP$ac_delim -INSTALL_STRIP_PROGRAM!$INSTALL_STRIP_PROGRAM$ac_delim -mkdir_p!$mkdir_p$ac_delim -AWK!$AWK$ac_delim -SET_MAKE!$SET_MAKE$ac_delim -am__leading_dot!$am__leading_dot$ac_delim -AMTAR!$AMTAR$ac_delim -am__tar!$am__tar$ac_delim -am__untar!$am__untar$ac_delim -CC!$CC$ac_delim -CFLAGS!$CFLAGS$ac_delim -LDFLAGS!$LDFLAGS$ac_delim -CPPFLAGS!$CPPFLAGS$ac_delim -ac_ct_CC!$ac_ct_CC$ac_delim -EXEEXT!$EXEEXT$ac_delim -OBJEXT!$OBJEXT$ac_delim -DEPDIR!$DEPDIR$ac_delim -am__include!$am__include$ac_delim -am__quote!$am__quote$ac_delim -AMDEP_TRUE!$AMDEP_TRUE$ac_delim -AMDEP_FALSE!$AMDEP_FALSE$ac_delim -AMDEPBACKSLASH!$AMDEPBACKSLASH$ac_delim -CCDEPMODE!$CCDEPMODE$ac_delim -am__fastdepCC_TRUE!$am__fastdepCC_TRUE$ac_delim -am__fastdepCC_FALSE!$am__fastdepCC_FALSE$ac_delim -AS!$AS$ac_delim -DLLTOOL!$DLLTOOL$ac_delim -OBJDUMP!$OBJDUMP$ac_delim -LIBTOOL!$LIBTOOL$ac_delim -build!$build$ac_delim -build_cpu!$build_cpu$ac_delim -build_vendor!$build_vendor$ac_delim -build_os!$build_os$ac_delim -host!$host$ac_delim -host_cpu!$host_cpu$ac_delim -host_vendor!$host_vendor$ac_delim -host_os!$host_os$ac_delim -SED!$SED$ac_delim -GREP!$GREP$ac_delim -EGREP!$EGREP$ac_delim -FGREP!$FGREP$ac_delim -LD!$LD$ac_delim -DUMPBIN!$DUMPBIN$ac_delim -ac_ct_DUMPBIN!$ac_ct_DUMPBIN$ac_delim -NM!$NM$ac_delim -LN_S!$LN_S$ac_delim -AR!$AR$ac_delim -_ACEOF - - if test `sed -n "s/.*$ac_delim\$/X/p" conf$$subs.sed | grep -c X` = 97; then - break - elif $ac_last_try; then - { { echo 
"$as_me:$LINENO: error: could not make $CONFIG_STATUS" >&5 -echo "$as_me: error: could not make $CONFIG_STATUS" >&2;} - { (exit 1); exit 1; }; } - else - ac_delim="$ac_delim!$ac_delim _$ac_delim!! " - fi -done -ac_eof=`sed -n '/^CEOF[0-9]*$/s/CEOF/0/p' conf$$subs.sed` -if test -n "$ac_eof"; then - ac_eof=`echo "$ac_eof" | sort -nru | sed 1q` - ac_eof=`expr $ac_eof + 1` +ac_cr=`echo X | tr X '\015'` +# On cygwin, bash can eat \r inside `` if the user requested igncr. +# But we know of no other shell where ac_cr would be empty at this +# point, so we can use a bashism as a fallback. +if test "x$ac_cr" = x; then + eval ac_cr=\$\'\\r\' +fi +ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' /dev/null` +if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then + ac_cs_awk_cr='\\r' +else + ac_cs_awk_cr=$ac_cr fi -cat >>$CONFIG_STATUS <<_ACEOF -cat >"\$tmp/subs-1.sed" <<\CEOF$ac_eof -/@[a-zA-Z_][a-zA-Z_0-9]*@/!b -_ACEOF -sed ' -s/[,\\&]/\\&/g; s/@/@|#_!!_#|/g -s/^/s,@/; s/!/@,|#_!!_#|/ -:n -t n -s/'"$ac_delim"'$/,g/; t -s/$/\\/; p -N; s/^.*\n//; s/[,\\&]/\\&/g; s/@/@|#_!!_#|/g; b n -' >>$CONFIG_STATUS >$CONFIG_STATUS <<_ACEOF -CEOF$ac_eof +echo 'BEGIN {' >"$ac_tmp/subs1.awk" && _ACEOF +{ + echo "cat >conf$$subs.awk <<_ACEOF" && + echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' && + echo "_ACEOF" +} >conf$$subs.sh || + as_fn_error $? 
"could not make $CONFIG_STATUS" "$LINENO" 5 +ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'` ac_delim='%!_!# ' for ac_last_try in false false false false false :; do - cat >conf$$subs.sed <<_ACEOF -RANLIB!$RANLIB$ac_delim -DSYMUTIL!$DSYMUTIL$ac_delim -NMEDIT!$NMEDIT$ac_delim -LIPO!$LIPO$ac_delim -OTOOL!$OTOOL$ac_delim -OTOOL64!$OTOOL64$ac_delim -CPP!$CPP$ac_delim -PKG_CONFIG!$PKG_CONFIG$ac_delim -PKG_CONFIG_PATH!$PKG_CONFIG_PATH$ac_delim -PKG_CONFIG_LIBDIR!$PKG_CONFIG_LIBDIR$ac_delim -CXX!$CXX$ac_delim -CXXFLAGS!$CXXFLAGS$ac_delim -ac_ct_CXX!$ac_ct_CXX$ac_delim -CXXDEPMODE!$CXXDEPMODE$ac_delim -am__fastdepCXX_TRUE!$am__fastdepCXX_TRUE$ac_delim -am__fastdepCXX_FALSE!$am__fastdepCXX_FALSE$ac_delim -CXXCPP!$CXXCPP$ac_delim -ac_cv_have_stdint_h!$ac_cv_have_stdint_h$ac_delim -ac_cv_have_stddef_h!$ac_cv_have_stddef_h$ac_delim -SNAPPY_MAJOR!$SNAPPY_MAJOR$ac_delim -SNAPPY_MINOR!$SNAPPY_MINOR$ac_delim -SNAPPY_PATCHLEVEL!$SNAPPY_PATCHLEVEL$ac_delim -ERLC_FLAGS!$ERLC_FLAGS$ac_delim -FLAGS!$FLAGS$ac_delim -ERL!$ERL$ac_delim -ERLANG_FLAGS!$ERLANG_FLAGS$ac_delim -JS185_CFLAGS!$JS185_CFLAGS$ac_delim -JS185_LIBS!$JS185_LIBS$ac_delim -JS_CFLAGS!$JS_CFLAGS$ac_delim -JS_LIBS!$JS_LIBS$ac_delim -WINDOWS_TRUE!$WINDOWS_TRUE$ac_delim -WINDOWS_FALSE!$WINDOWS_FALSE$ac_delim -JS_LIB_BINARY!$JS_LIB_BINARY$ac_delim -openssl_bin_dir!$openssl_bin_dir$ac_delim -INNO_COMPILER_EXECUTABLE!$INNO_COMPILER_EXECUTABLE$ac_delim -msvc_redist_dir!$msvc_redist_dir$ac_delim -msvc_redist_name!$msvc_redist_name$ac_delim -JS_LDFLAGS!$JS_LDFLAGS$ac_delim -ICU_CONFIG!$ICU_CONFIG$ac_delim -ICU_CFLAGS!$ICU_CFLAGS$ac_delim -ICU_CXXFLAGS!$ICU_CXXFLAGS$ac_delim -ICU_LIBS!$ICU_LIBS$ac_delim -ICU_CPPFLAGS!$ICU_CPPFLAGS$ac_delim -ICU_BIN!$ICU_BIN$ac_delim -CURL_CONFIG!$CURL_CONFIG$ac_delim -CURL_CFLAGS!$CURL_CFLAGS$ac_delim -CURL_LIBS!$CURL_LIBS$ac_delim -otp_release!$otp_release$ac_delim -USE_OTP_NIFS_TRUE!$USE_OTP_NIFS_TRUE$ac_delim -USE_OTP_NIFS_FALSE!$USE_OTP_NIFS_FALSE$ac_delim 
-USE_EJSON_COMPARE_NIF_TRUE!$USE_EJSON_COMPARE_NIF_TRUE$ac_delim -USE_EJSON_COMPARE_NIF_FALSE!$USE_EJSON_COMPARE_NIF_FALSE$ac_delim -ERLC!$ERLC$ac_delim -HELP2MAN_EXECUTABLE!$HELP2MAN_EXECUTABLE$ac_delim -initdir!$initdir$ac_delim -launchddir!$launchddir$ac_delim -INIT_TRUE!$INIT_TRUE$ac_delim -INIT_FALSE!$INIT_FALSE$ac_delim -LAUNCHD_TRUE!$LAUNCHD_TRUE$ac_delim -LAUNCHD_FALSE!$LAUNCHD_FALSE$ac_delim -HELP2MAN_TRUE!$HELP2MAN_TRUE$ac_delim -HELP2MAN_FALSE!$HELP2MAN_FALSE$ac_delim -USE_NATIVE_MOCHIJSON_TRUE!$USE_NATIVE_MOCHIJSON_TRUE$ac_delim -USE_NATIVE_MOCHIJSON_FALSE!$USE_NATIVE_MOCHIJSON_FALSE$ac_delim -USE_CURL_TRUE!$USE_CURL_TRUE$ac_delim -USE_CURL_FALSE!$USE_CURL_FALSE$ac_delim -package_author_name!$package_author_name$ac_delim -package_author_address!$package_author_address$ac_delim -package_identifier!$package_identifier$ac_delim -package_tarname!$package_tarname$ac_delim -package_name!$package_name$ac_delim -version!$version$ac_delim -version_major!$version_major$ac_delim -version_minor!$version_minor$ac_delim -version_revision!$version_revision$ac_delim -version_stage!$version_stage$ac_delim -version_release!$version_release$ac_delim -bug_uri!$bug_uri$ac_delim -localconfdir!$localconfdir$ac_delim -localdatadir!$localdatadir$ac_delim -localdocdir!$localdocdir$ac_delim -locallibdir!$locallibdir$ac_delim -localstatelibdir!$localstatelibdir$ac_delim -localstatelogdir!$localstatelogdir$ac_delim -localstaterundir!$localstaterundir$ac_delim -locallibbindir!$locallibbindir$ac_delim -localerlanglibdir!$localerlanglibdir$ac_delim -abs_top_srcdir!$abs_top_srcdir$ac_delim -abs_top_builddir!$abs_top_builddir$ac_delim -LIBOBJS!$LIBOBJS$ac_delim -LTLIBOBJS!$LTLIBOBJS$ac_delim -_ACEOF + . ./conf$$subs.sh || + as_fn_error $? 
"could not make $CONFIG_STATUS" "$LINENO" 5 - if test `sed -n "s/.*$ac_delim\$/X/p" conf$$subs.sed | grep -c X` = 91; then + ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X` + if test $ac_delim_n = $ac_delim_num; then break elif $ac_last_try; then - { { echo "$as_me:$LINENO: error: could not make $CONFIG_STATUS" >&5 -echo "$as_me: error: could not make $CONFIG_STATUS" >&2;} - { (exit 1); exit 1; }; } + as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 else ac_delim="$ac_delim!$ac_delim _$ac_delim!! " fi done +rm -f conf$$subs.sh -ac_eof=`sed -n '/^CEOF[0-9]*$/s/CEOF/0/p' conf$$subs.sed` -if test -n "$ac_eof"; then - ac_eof=`echo "$ac_eof" | sort -nru | sed 1q` - ac_eof=`expr $ac_eof + 1` -fi - -cat >>$CONFIG_STATUS <<_ACEOF -cat >"\$tmp/subs-2.sed" <<\CEOF$ac_eof -/@[a-zA-Z_][a-zA-Z_0-9]*@/!b end -_ACEOF -sed ' -s/[,\\&]/\\&/g; s/@/@|#_!!_#|/g -s/^/s,@/; s/!/@,|#_!!_#|/ -:n -t n -s/'"$ac_delim"'$/,g/; t -s/$/\\/; p -N; s/^.*\n//; s/[,\\&]/\\&/g; s/@/@|#_!!_#|/g; b n -' >>$CONFIG_STATUS >$CONFIG_STATUS <<_ACEOF -:end -s/|#_!!_#|//g -CEOF$ac_eof +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK && _ACEOF +sed -n ' +h +s/^/S["/; s/!.*/"]=/ +p +g +s/^[^!]*!// +:repl +t repl +s/'"$ac_delim"'$// +t delim +:nl +h +s/\(.\{148\}\)..*/\1/ +t more1 +s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/ +p +n +b repl +:more1 +s/["\\]/\\&/g; s/^/"/; s/$/"\\/ +p +g +s/.\{148\}// +t nl +:delim +h +s/\(.\{148\}\)..*/\1/ +t more2 +s/["\\]/\\&/g; s/^/"/; s/$/"/ +p +b +:more2 +s/["\\]/\\&/g; s/^/"/; s/$/"\\/ +p +g +s/.\{148\}// +t delim +' >$CONFIG_STATUS || ac_write_fail=1 +rm -f conf$$subs.awk +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +_ACAWK +cat >>"\$ac_tmp/subs1.awk" <<_ACAWK && + for (key in S) S_is_set[key] = 1 + FS = "" + +} +{ + line = $ 0 + nfields = split(line, field, "@") + substed = 0 + len = length(field[1]) + for (i = 2; i < nfields; i++) { + key = field[i] + keylen = length(key) + if (S_is_set[key]) { + value = 
S[key] + line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3) + len += length(value) + length(field[++i]) + substed = 1 + } else + len += 1 + keylen + } + + print line +} +_ACAWK +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 +if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then + sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g" +else + cat +fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \ + || as_fn_error $? "could not setup config files machinery" "$LINENO" 5 +_ACEOF -# VPATH may cause trouble with some makes, so we remove $(srcdir), -# ${srcdir} and @srcdir@ from VPATH if srcdir is ".", strip leading and +# VPATH may cause trouble with some makes, so we remove sole $(srcdir), +# ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and # trailing colons and then remove the whole line if VPATH becomes empty # (actually we leave an empty line to preserve line numbers). if test "x$srcdir" = x.; then - ac_vpsub='/^[ ]*VPATH[ ]*=/{ -s/:*\$(srcdir):*/:/ -s/:*\${srcdir}:*/:/ -s/:*@srcdir@:*/:/ -s/^\([^=]*=[ ]*\):*/\1/ + ac_vpsub='/^[ ]*VPATH[ ]*=[ ]*/{ +h +s/// +s/^/:/ +s/[ ]*$/:/ +s/:\$(srcdir):/:/g +s/:\${srcdir}:/:/g +s/:@srcdir@:/:/g +s/^:*// s/:*$// +x +s/\(=[ ]*\).*/\1/ +G +s/\n// s/^[^=]*=[ ]*$// }' fi -cat >>$CONFIG_STATUS <<\_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 fi # test -n "$CONFIG_FILES" +# Set up the scripts for CONFIG_HEADERS section. +# No need to generate them if there are no CONFIG_HEADERS. +# This happens for instance with `./config.status Makefile'. +if test -n "$CONFIG_HEADERS"; then +cat >"$ac_tmp/defines.awk" <<\_ACAWK || +BEGIN { +_ACEOF + +# Transform confdefs.h into an awk script `defines.awk', embedded as +# here-document in config.status, that substitutes the proper values into +# config.h.in to produce config.h. + +# Create a delimiter string that does not exist in confdefs.h, to ease +# handling of long lines. 
+ac_delim='%!_!# ' +for ac_last_try in false false :; do + ac_tt=`sed -n "/$ac_delim/p" confdefs.h` + if test -z "$ac_tt"; then + break + elif $ac_last_try; then + as_fn_error $? "could not make $CONFIG_HEADERS" "$LINENO" 5 + else + ac_delim="$ac_delim!$ac_delim _$ac_delim!! " + fi +done + +# For the awk script, D is an array of macro values keyed by name, +# likewise P contains macro parameters if any. Preserve backslash +# newline sequences. + +ac_word_re=[_$as_cr_Letters][_$as_cr_alnum]* +sed -n ' +s/.\{148\}/&'"$ac_delim"'/g +t rset +:rset +s/^[ ]*#[ ]*define[ ][ ]*/ / +t def +d +:def +s/\\$// +t bsnl +s/["\\]/\\&/g +s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ +D["\1"]=" \3"/p +s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2"/p +d +:bsnl +s/["\\]/\\&/g +s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ +D["\1"]=" \3\\\\\\n"\\/p +t cont +s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2\\\\\\n"\\/p +t cont +d +:cont +n +s/.\{148\}/&'"$ac_delim"'/g +t clear +:clear +s/\\$// +t bsnlc +s/["\\]/\\&/g; s/^/"/; s/$/"/p +d +:bsnlc +s/["\\]/\\&/g; s/^/"/; s/$/\\\\\\n"\\/p +b cont +' >$CONFIG_STATUS || ac_write_fail=1 + +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 + for (key in D) D_is_set[key] = 1 + FS = "" +} +/^[\t ]*#[\t ]*(define|undef)[\t ]+$ac_word_re([\t (]|\$)/ { + line = \$ 0 + split(line, arg, " ") + if (arg[1] == "#") { + defundef = arg[2] + mac1 = arg[3] + } else { + defundef = substr(arg[1], 2) + mac1 = arg[2] + } + split(mac1, mac2, "(") #) + macro = mac2[1] + prefix = substr(line, 1, index(line, defundef) - 1) + if (D_is_set[macro]) { + # Preserve the white space surrounding the "#". + print prefix "define", macro P[macro] D[macro] + next + } else { + # Replace #undef with comments. This is necessary, for example, + # in the case of _POSIX_SOURCE, which is predefined and required + # on some systems where configure will not decide to define it. 
+ if (defundef == "undef") { + print "/*", prefix defundef, macro, "*/" + next + } + } +} +{ print } +_ACAWK +_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 + as_fn_error $? "could not setup config headers machinery" "$LINENO" 5 +fi # test -n "$CONFIG_HEADERS" + -for ac_tag in :F $CONFIG_FILES :H $CONFIG_HEADERS :C $CONFIG_COMMANDS +eval set X " :F $CONFIG_FILES :H $CONFIG_HEADERS :C $CONFIG_COMMANDS" +shift +for ac_tag do case $ac_tag in :[FHLC]) ac_mode=$ac_tag; continue;; esac case $ac_mode$ac_tag in :[FHL]*:*);; - :L* | :C*:*) { { echo "$as_me:$LINENO: error: Invalid tag $ac_tag." >&5 -echo "$as_me: error: Invalid tag $ac_tag." >&2;} - { (exit 1); exit 1; }; };; + :L* | :C*:*) as_fn_error $? "invalid tag \`$ac_tag'" "$LINENO" 5;; :[FH]-) ac_tag=-:-;; :[FH]*) ac_tag=$ac_tag:$ac_tag.in;; esac @@ -20184,7 +20709,7 @@ for ac_f do case $ac_f in - -) ac_f="$tmp/stdin";; + -) ac_f="$ac_tmp/stdin";; *) # Look for the file first in the build tree, then in the source tree # (if the path is not absolute). The absolute path cannot be DOS-style, # because $ac_f cannot contain `:'. @@ -20193,26 +20718,34 @@ [\\/$]*) false;; *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";; esac || - { { echo "$as_me:$LINENO: error: cannot find input file: $ac_f" >&5 -echo "$as_me: error: cannot find input file: $ac_f" >&2;} - { (exit 1); exit 1; }; };; + as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;; esac - ac_file_inputs="$ac_file_inputs $ac_f" + case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac + as_fn_append ac_file_inputs " '$ac_f'" done # Let's still pretend it is `configure' which instantiates (i.e., don't # use $as_me), people would be surprised to read: # /* config.h. Generated by config.status. */ - configure_input="Generated from "`IFS=: - echo $* | sed 's|^[^:]*/||;s|:[^:]*/|, |g'`" by configure." + configure_input='Generated from '` + $as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g' + `' by configure.' 
if test x"$ac_file" != x-; then configure_input="$ac_file. $configure_input" - { echo "$as_me:$LINENO: creating $ac_file" >&5 -echo "$as_me: creating $ac_file" >&6;} + { $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5 +$as_echo "$as_me: creating $ac_file" >&6;} fi + # Neutralize special characters interpreted by sed in replacement strings. + case $configure_input in #( + *\&* | *\|* | *\\* ) + ac_sed_conf_input=`$as_echo "$configure_input" | + sed 's/[\\\\&|]/\\\\&/g'`;; #( + *) ac_sed_conf_input=$configure_input;; + esac case $ac_tag in - *:-:* | *:-) cat >"$tmp/stdin";; + *:-:* | *:-) cat >"$ac_tmp/stdin" \ + || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;; esac ;; esac @@ -20222,42 +20755,7 @@ X"$ac_file" : 'X\(//\)[^/]' \| \ X"$ac_file" : 'X\(//\)$' \| \ X"$ac_file" : 'X\(/\)' \| . 2>/dev/null || -echo X"$ac_file" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - { as_dir="$ac_dir" - case $as_dir in #( - -*) as_dir=./$as_dir;; - esac - test -d "$as_dir" || { $as_mkdir_p && mkdir -p "$as_dir"; } || { - as_dirs= - while :; do - case $as_dir in #( - *\'*) as_qdir=`echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #( - *) as_qdir=$as_dir;; - esac - as_dirs="'$as_qdir' $as_dirs" - as_dir=`$as_dirname -- "$as_dir" || -$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$as_dir" : 'X\(//\)[^/]' \| \ - X"$as_dir" : 'X\(//\)$' \| \ - X"$as_dir" : 'X\(/\)' \| . 
2>/dev/null || -echo X"$as_dir" | +$as_echo X"$ac_file" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q @@ -20275,20 +20773,15 @@ q } s/.*/./; q'` - test -d "$as_dir" && break - done - test -z "$as_dirs" || eval "mkdir $as_dirs" - } || test -d "$as_dir" || { { echo "$as_me:$LINENO: error: cannot create directory $as_dir" >&5 -echo "$as_me: error: cannot create directory $as_dir" >&2;} - { (exit 1); exit 1; }; }; } + as_dir="$ac_dir"; as_fn_mkdir_p ac_builddir=. case "$ac_dir" in .) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; *) - ac_dir_suffix=/`echo "$ac_dir" | sed 's,^\.[\\/],,'` + ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` # A ".." for each directory in $ac_dir_suffix. - ac_top_builddir_sub=`echo "$ac_dir_suffix" | sed 's,/[^\\/]*,/..,g;s,/,,'` + ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` case $ac_top_builddir_sub in "") ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; @@ -20333,12 +20826,12 @@ esac _ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # If the template does not know about datarootdir, expand it. # FIXME: This hack should be removed a few years after 2.60. 
ac_datarootdir_hack=; ac_datarootdir_seen= - -case `sed -n '/datarootdir/ { +ac_sed_dataroot=' +/datarootdir/ { p q } @@ -20346,36 +20839,37 @@ /@docdir@/p /@infodir@/p /@localedir@/p -/@mandir@/p -' $ac_file_inputs` in +/@mandir@/p' +case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in *datarootdir*) ac_datarootdir_seen=yes;; *@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*) - { echo "$as_me:$LINENO: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5 -echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;} + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5 +$as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;} _ACEOF -cat >>$CONFIG_STATUS <<_ACEOF +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_datarootdir_hack=' s&@datadir@&$datadir&g s&@docdir@&$docdir&g s&@infodir@&$infodir&g s&@localedir@&$localedir&g s&@mandir@&$mandir&g - s&\\\${datarootdir}&$datarootdir&g' ;; + s&\\\${datarootdir}&$datarootdir&g' ;; esac _ACEOF # Neutralize VPATH when `$srcdir' = `.'. # Shell code in configure.ac might set extrasub. # FIXME: do we really want to maintain this feature? 
-cat >>$CONFIG_STATUS <<_ACEOF - sed "$ac_vpsub +cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 +ac_sed_extra="$ac_vpsub $extrasub _ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF +cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 :t /@[a-zA-Z_][a-zA-Z_0-9]*@/!b -s&@configure_input@&$configure_input&;t t +s|@configure_input@|$ac_sed_conf_input|;t t s&@top_builddir@&$ac_top_builddir_sub&;t t +s&@top_build_prefix@&$ac_top_build_prefix&;t t s&@srcdir@&$ac_srcdir&;t t s&@abs_srcdir@&$ac_abs_srcdir&;t t s&@top_srcdir@&$ac_top_srcdir&;t t @@ -20386,135 +20880,66 @@ s&@INSTALL@&$ac_INSTALL&;t t s&@MKDIR_P@&$ac_MKDIR_P&;t t $ac_datarootdir_hack -" $ac_file_inputs | sed -f "$tmp/subs-1.sed" | sed -f "$tmp/subs-2.sed" >$tmp/out +" +eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \ + >$ac_tmp/out || as_fn_error $? "could not create $ac_file" "$LINENO" 5 test -z "$ac_datarootdir_hack$ac_datarootdir_seen" && - { ac_out=`sed -n '/\${datarootdir}/p' "$tmp/out"`; test -n "$ac_out"; } && - { ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' "$tmp/out"`; test -z "$ac_out"; } && - { echo "$as_me:$LINENO: WARNING: $ac_file contains a reference to the variable \`datarootdir' -which seems to be undefined. Please make sure it is defined." >&5 -echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir' -which seems to be undefined. Please make sure it is defined." >&2;} + { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } && + { ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' \ + "$ac_tmp/out"`; test -z "$ac_out"; } && + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir' +which seems to be undefined. Please make sure it is defined" >&5 +$as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir' +which seems to be undefined. 
Please make sure it is defined" >&2;} - rm -f "$tmp/stdin" + rm -f "$ac_tmp/stdin" case $ac_file in - -) cat "$tmp/out"; rm -f "$tmp/out";; - *) rm -f "$ac_file"; mv "$tmp/out" $ac_file;; - esac + -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";; + *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";; + esac \ + || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;; :H) # # CONFIG_HEADER # -_ACEOF - -# Transform confdefs.h into a sed script `conftest.defines', that -# substitutes the proper values into config.h.in to produce config.h. -rm -f conftest.defines conftest.tail -# First, append a space to every undef/define line, to ease matching. -echo 's/$/ /' >conftest.defines -# Then, protect against being on the right side of a sed subst, or in -# an unquoted here document, in config.status. If some macros were -# called several times there might be several #defines for the same -# symbol, which is useless. But do not sort them, since the last -# AC_DEFINE must be honored. -ac_word_re=[_$as_cr_Letters][_$as_cr_alnum]* -# These sed commands are passed to sed as "A NAME B PARAMS C VALUE D", where -# NAME is the cpp macro being defined, VALUE is the value it is being given. -# PARAMS is the parameter list in the macro definition--in most cases, it's -# just an empty string. -ac_dA='s,^\\([ #]*\\)[^ ]*\\([ ]*' -ac_dB='\\)[ (].*,\\1define\\2' -ac_dC=' ' -ac_dD=' ,' - -uniq confdefs.h | - sed -n ' - t rset - :rset - s/^[ ]*#[ ]*define[ ][ ]*// - t ok - d - :ok - s/[\\&,]/\\&/g - s/^\('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/ '"$ac_dA"'\1'"$ac_dB"'\2'"${ac_dC}"'\3'"$ac_dD"'/p - s/^\('"$ac_word_re"'\)[ ]*\(.*\)/'"$ac_dA"'\1'"$ac_dB$ac_dC"'\2'"$ac_dD"'/p - ' >>conftest.defines - -# Remove the space that was appended to ease matching. -# Then replace #undef with comments. This is necessary, for -# example, in the case of _POSIX_SOURCE, which is predefined and required -# on some systems where configure will not decide to define it. 
-# (The regexp can be short, since the line contains either #define or #undef.) -echo 's/ $// -s,^[ #]*u.*,/* & */,' >>conftest.defines - -# Break up conftest.defines: -ac_max_sed_lines=50 - -# First sed command is: sed -f defines.sed $ac_file_inputs >"$tmp/out1" -# Second one is: sed -f defines.sed "$tmp/out1" >"$tmp/out2" -# Third one will be: sed -f defines.sed "$tmp/out2" >"$tmp/out1" -# et cetera. -ac_in='$ac_file_inputs' -ac_out='"$tmp/out1"' -ac_nxt='"$tmp/out2"' - -while : -do - # Write a here document: - cat >>$CONFIG_STATUS <<_ACEOF - # First, check the format of the line: - cat >"\$tmp/defines.sed" <<\\CEOF -/^[ ]*#[ ]*undef[ ][ ]*$ac_word_re[ ]*/b def -/^[ ]*#[ ]*define[ ][ ]*$ac_word_re[( ]/b def -b -:def -_ACEOF - sed ${ac_max_sed_lines}q conftest.defines >>$CONFIG_STATUS - echo 'CEOF - sed -f "$tmp/defines.sed"' "$ac_in >$ac_out" >>$CONFIG_STATUS - ac_in=$ac_out; ac_out=$ac_nxt; ac_nxt=$ac_in - sed 1,${ac_max_sed_lines}d conftest.defines >conftest.tail - grep . conftest.tail >/dev/null || break - rm -f conftest.defines - mv conftest.tail conftest.defines -done -rm -f conftest.defines conftest.tail - -echo "ac_result=$ac_in" >>$CONFIG_STATUS -cat >>$CONFIG_STATUS <<\_ACEOF if test x"$ac_file" != x-; then - echo "/* $configure_input */" >"$tmp/config.h" - cat "$ac_result" >>"$tmp/config.h" - if diff $ac_file "$tmp/config.h" >/dev/null 2>&1; then - { echo "$as_me:$LINENO: $ac_file is unchanged" >&5 -echo "$as_me: $ac_file is unchanged" >&6;} + { + $as_echo "/* $configure_input */" \ + && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" + } >"$ac_tmp/config.h" \ + || as_fn_error $? "could not create $ac_file" "$LINENO" 5 + if diff "$ac_file" "$ac_tmp/config.h" >/dev/null 2>&1; then + { $as_echo "$as_me:${as_lineno-$LINENO}: $ac_file is unchanged" >&5 +$as_echo "$as_me: $ac_file is unchanged" >&6;} else - rm -f $ac_file - mv "$tmp/config.h" $ac_file + rm -f "$ac_file" + mv "$ac_tmp/config.h" "$ac_file" \ + || as_fn_error $? 
"could not create $ac_file" "$LINENO" 5 fi else - echo "/* $configure_input */" - cat "$ac_result" + $as_echo "/* $configure_input */" \ + && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" \ + || as_fn_error $? "could not create -" "$LINENO" 5 fi - rm -f "$tmp/out12" -# Compute $ac_file's index in $config_headers. +# Compute "$ac_file"'s index in $config_headers. +_am_arg="$ac_file" _am_stamp_count=1 for _am_header in $config_headers :; do case $_am_header in - $ac_file | $ac_file:* ) + $_am_arg | $_am_arg:* ) break ;; * ) _am_stamp_count=`expr $_am_stamp_count + 1` ;; esac done -echo "timestamp for $ac_file" >`$as_dirname -- $ac_file || -$as_expr X$ac_file : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X$ac_file : 'X\(//\)[^/]' \| \ - X$ac_file : 'X\(//\)$' \| \ - X$ac_file : 'X\(/\)' \| . 2>/dev/null || -echo X$ac_file | +echo "timestamp for $_am_arg" >`$as_dirname -- "$_am_arg" || +$as_expr X"$_am_arg" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ + X"$_am_arg" : 'X\(//\)[^/]' \| \ + X"$_am_arg" : 'X\(//\)$' \| \ + X"$_am_arg" : 'X\(/\)' \| . 2>/dev/null || +$as_echo X"$_am_arg" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q @@ -20534,30 +20959,40 @@ s/.*/./; q'`/stamp-h$_am_stamp_count ;; - :C) { echo "$as_me:$LINENO: executing $ac_file commands" >&5 -echo "$as_me: executing $ac_file commands" >&6;} + :C) { $as_echo "$as_me:${as_lineno-$LINENO}: executing $ac_file commands" >&5 +$as_echo "$as_me: executing $ac_file commands" >&6;} ;; esac case $ac_file$ac_mode in - "depfiles":C) test x"$AMDEP_TRUE" != x"" || for mf in $CONFIG_FILES; do - # Strip MF so we end up with the name of the file. - mf=`echo "$mf" | sed -e 's/:.*$//'` - # Check whether this is an Automake generated Makefile or not. - # We used to match only the files named `Makefile.in', but - # some people rename them; so instead we look at the file content. - # Grep'ing the first line is not enough: some people post-process - # each Makefile.in and add a new line on top of each file to say so. 
- # Grep'ing the whole file is not good either: AIX grep has a line - # limit of 2048, but all sed's we know have understand at least 4000. - if sed 10q "$mf" | grep '^#.*generated by automake' > /dev/null 2>&1; then - dirpart=`$as_dirname -- "$mf" || + "depfiles":C) test x"$AMDEP_TRUE" != x"" || { + # Autoconf 2.62 quotes --file arguments for eval, but not when files + # are listed without --file. Let's play safe and only enable the eval + # if we detect the quoting. + case $CONFIG_FILES in + *\'*) eval set x "$CONFIG_FILES" ;; + *) set x $CONFIG_FILES ;; + esac + shift + for mf + do + # Strip MF so we end up with the name of the file. + mf=`echo "$mf" | sed -e 's/:.*$//'` + # Check whether this is an Automake generated Makefile or not. + # We used to match only the files named `Makefile.in', but + # some people rename them; so instead we look at the file content. + # Grep'ing the first line is not enough: some people post-process + # each Makefile.in and add a new line on top of each file to say so. + # Grep'ing the whole file is not good either: AIX grep has a line + # limit of 2048, but all sed's we know have understand at least 4000. + if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then + dirpart=`$as_dirname -- "$mf" || $as_expr X"$mf" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$mf" : 'X\(//\)[^/]' \| \ X"$mf" : 'X\(//\)$' \| \ X"$mf" : 'X\(/\)' \| . 2>/dev/null || -echo X"$mf" | +$as_echo X"$mf" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q @@ -20575,68 +21010,33 @@ q } s/.*/./; q'` - else - continue - fi - # Extract the definition of DEPDIR, am__include, and am__quote - # from the Makefile without running `make'. 
- DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"` - test -z "$DEPDIR" && continue - am__include=`sed -n 's/^am__include = //p' < "$mf"` - test -z "am__include" && continue - am__quote=`sed -n 's/^am__quote = //p' < "$mf"` - # When using ansi2knr, U may be empty or an underscore; expand it - U=`sed -n 's/^U = //p' < "$mf"` - # Find all dependency output files, they are included files with - # $(DEPDIR) in their names. We invoke sed twice because it is the - # simplest approach to changing $(DEPDIR) to its actual value in the - # expansion. - for file in `sed -n " - s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \ - sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g' -e 's/\$U/'"$U"'/g'`; do - # Make sure the directory exists. - test -f "$dirpart/$file" && continue - fdir=`$as_dirname -- "$file" || + else + continue + fi + # Extract the definition of DEPDIR, am__include, and am__quote + # from the Makefile without running `make'. + DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"` + test -z "$DEPDIR" && continue + am__include=`sed -n 's/^am__include = //p' < "$mf"` + test -z "am__include" && continue + am__quote=`sed -n 's/^am__quote = //p' < "$mf"` + # When using ansi2knr, U may be empty or an underscore; expand it + U=`sed -n 's/^U = //p' < "$mf"` + # Find all dependency output files, they are included files with + # $(DEPDIR) in their names. We invoke sed twice because it is the + # simplest approach to changing $(DEPDIR) to its actual value in the + # expansion. + for file in `sed -n " + s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \ + sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g' -e 's/\$U/'"$U"'/g'`; do + # Make sure the directory exists. + test -f "$dirpart/$file" && continue + fdir=`$as_dirname -- "$file" || $as_expr X"$file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$file" : 'X\(//\)[^/]' \| \ X"$file" : 'X\(//\)$' \| \ X"$file" : 'X\(/\)' \| . 
2>/dev/null || -echo X"$file" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - { as_dir=$dirpart/$fdir - case $as_dir in #( - -*) as_dir=./$as_dir;; - esac - test -d "$as_dir" || { $as_mkdir_p && mkdir -p "$as_dir"; } || { - as_dirs= - while :; do - case $as_dir in #( - *\'*) as_qdir=`echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #( - *) as_qdir=$as_dir;; - esac - as_dirs="'$as_qdir' $as_dirs" - as_dir=`$as_dirname -- "$as_dir" || -$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$as_dir" : 'X\(//\)[^/]' \| \ - X"$as_dir" : 'X\(//\)$' \| \ - X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || -echo X"$as_dir" | +$as_echo X"$file" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q @@ -20654,16 +21054,12 @@ q } s/.*/./; q'` - test -d "$as_dir" && break + as_dir=$dirpart/$fdir; as_fn_mkdir_p + # echo "creating $dirpart/$file" + echo '# dummy' > "$dirpart/$file" done - test -z "$as_dirs" || eval "mkdir $as_dirs" - } || test -d "$as_dir" || { { echo "$as_me:$LINENO: error: cannot create directory $as_dir" >&5 -echo "$as_me: error: cannot create directory $as_dir" >&2;} - { (exit 1); exit 1; }; }; } - # echo "creating $dirpart/$file" - echo '# dummy' > "$dirpart/$file" done -done +} ;; "libtool":C) @@ -20674,8 +21070,8 @@ fi cfgfile="${ofile}T" - trap "$RM -f \"$cfgfile\"; exit 1" 1 2 15 - $RM -f "$cfgfile" + trap "$RM \"$cfgfile\"; exit 1" 1 2 15 + $RM "$cfgfile" cat <<_LT_EOF >> "$cfgfile" #! $SHELL @@ -20799,16 +21195,36 @@ # turn newlines into spaces. NL2SP=$lt_lt_NL2SP +# convert \$build file names to \$host format. +to_host_file_cmd=$lt_cv_to_host_file_cmd + +# convert \$build files to toolchain format. +to_tool_file_cmd=$lt_cv_to_tool_file_cmd + # Method to check whether dependent libraries are shared objects. deplibs_check_method=$lt_deplibs_check_method -# Command to use when deplibs_check_method == "file_magic". 
+# Command to use when deplibs_check_method = "file_magic". file_magic_cmd=$lt_file_magic_cmd +# How to find potential files when deplibs_check_method = "file_magic". +file_magic_glob=$lt_file_magic_glob + +# Find potential files using nocaseglob when deplibs_check_method = "file_magic". +want_nocaseglob=$lt_want_nocaseglob + +# Command to associate shared and link libraries. +sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd + # The archiver. AR=$lt_AR + +# Flags to create an archive. AR_FLAGS=$lt_AR_FLAGS +# How to feed a file listing to the archiver. +archiver_list_spec=$lt_archiver_list_spec + # A symbol stripping program. STRIP=$lt_STRIP @@ -20838,6 +21254,12 @@ # Transform the output of nm in a C name address pair when lib prefix is needed. global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix +# Specify filename containing input files for \$NM. +nm_file_list_spec=$lt_nm_file_list_spec + +# The root where to search for dependent libraries,and in which our libraries should be installed. +lt_sysroot=$lt_sysroot + # The name of the directory that contains temporary libtool files. objdir=$objdir @@ -20847,6 +21269,9 @@ # Must we lock files when doing compilation? need_locks=$lt_need_locks +# Manifest tool. +MANIFEST_TOOL=$lt_MANIFEST_TOOL + # Tool to manipulate archived DWARF debug symbol files on Mac OS X. DSYMUTIL=$lt_DSYMUTIL @@ -20961,12 +21386,12 @@ # Compiler flag to turn off builtin functions. no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag -# How to pass a linker flag through the compiler. -wl=$lt_lt_prog_compiler_wl - # Additional compiler flags for building library objects. pic_flag=$lt_lt_prog_compiler_pic +# How to pass a linker flag through the compiler. +wl=$lt_lt_prog_compiler_wl + # Compiler flag to prevent dynamic linking. link_static_flag=$lt_lt_prog_compiler_static @@ -21053,9 +21478,6 @@ # Whether libtool must link a program against all its dependency libraries. 
link_all_deplibs=$link_all_deplibs -# Fix the shell variable \$srcfile for the compiler. -fix_srcfile_path=$lt_fix_srcfile_path - # Set to "yes" if exported symbols are required. always_export_symbols=$always_export_symbols @@ -21071,6 +21493,9 @@ # Commands necessary for linking programs (against libraries) with templates. prelink_cmds=$lt_prelink_cmds +# Commands necessary for finishing linking programs. +postlink_cmds=$lt_postlink_cmds + # Specify filename containing input files. file_list_spec=$lt_file_list_spec @@ -21117,210 +21542,169 @@ # if finds mixed CR/LF and LF-only lines. Since sed operates in # text mode, it properly converts lines to CR/LF. This bash problem # is reportedly fixed, but why not run on old versions too? - sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \ - || (rm -f "$cfgfile"; exit 1) - - case $xsi_shell in - yes) - cat << \_LT_EOF >> "$cfgfile" - -# func_dirname file append nondir_replacement -# Compute the dirname of FILE. If nonempty, add APPEND to the result, -# otherwise set result to NONDIR_REPLACEMENT. -func_dirname () -{ - case ${1} in - */*) func_dirname_result="${1%/*}${2}" ;; - * ) func_dirname_result="${3}" ;; - esac -} - -# func_basename file -func_basename () -{ - func_basename_result="${1##*/}" -} - -# func_dirname_and_basename file append nondir_replacement -# perform func_basename and func_dirname in a single function -# call: -# dirname: Compute the dirname of FILE. If nonempty, -# add APPEND to the result, otherwise set result -# to NONDIR_REPLACEMENT. -# value returned in "$func_dirname_result" -# basename: Compute filename of FILE. -# value retuned in "$func_basename_result" -# Implementation must be kept synchronized with func_dirname -# and func_basename. For efficiency, we do not delegate to -# those functions but instead duplicate the functionality here. 
-func_dirname_and_basename () -{ - case ${1} in - */*) func_dirname_result="${1%/*}${2}" ;; - * ) func_dirname_result="${3}" ;; - esac - func_basename_result="${1##*/}" -} - -# func_stripname prefix suffix name -# strip PREFIX and SUFFIX off of NAME. -# PREFIX and SUFFIX must not contain globbing or regex special -# characters, hashes, percent signs, but SUFFIX may contain a leading -# dot (in which case that matches only a dot). -func_stripname () -{ - # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are - # positional parameters, so assign one to ordinary parameter first. - func_stripname_result=${3} - func_stripname_result=${func_stripname_result#"${1}"} - func_stripname_result=${func_stripname_result%"${2}"} -} - -# func_opt_split -func_opt_split () -{ - func_opt_split_opt=${1%%=*} - func_opt_split_arg=${1#*=} -} - -# func_lo2o object -func_lo2o () -{ - case ${1} in - *.lo) func_lo2o_result=${1%.lo}.${objext} ;; - *) func_lo2o_result=${1} ;; - esac -} - -# func_xform libobj-or-source -func_xform () -{ - func_xform_result=${1%.*}.lo -} - -# func_arith arithmetic-term... -func_arith () -{ - func_arith_result=$(( $* )) -} - -# func_len string -# STRING may not start with a hyphen. -func_len () -{ - func_len_result=${#1} -} - -_LT_EOF - ;; - *) # Bourne compatible functions. - cat << \_LT_EOF >> "$cfgfile" - -# func_dirname file append nondir_replacement -# Compute the dirname of FILE. If nonempty, add APPEND to the result, -# otherwise set result to NONDIR_REPLACEMENT. -func_dirname () -{ - # Extract subdirectory from the argument. - func_dirname_result=`$ECHO "${1}" | $SED "$dirname"` - if test "X$func_dirname_result" = "X${1}"; then - func_dirname_result="${3}" - else - func_dirname_result="$func_dirname_result${2}" - fi -} - -# func_basename file -func_basename () -{ - func_basename_result=`$ECHO "${1}" | $SED "$basename"` -} - - -# func_stripname prefix suffix name -# strip PREFIX and SUFFIX off of NAME. 
-# PREFIX and SUFFIX must not contain globbing or regex special -# characters, hashes, percent signs, but SUFFIX may contain a leading -# dot (in which case that matches only a dot). -# func_strip_suffix prefix name -func_stripname () -{ - case ${2} in - .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; - *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; - esac -} - -# sed scripts: -my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q' -my_sed_long_arg='1s/^-[^=]*=//' - -# func_opt_split -func_opt_split () -{ - func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"` - func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"` -} - -# func_lo2o object -func_lo2o () -{ - func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"` -} - -# func_xform libobj-or-source -func_xform () -{ - func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'` -} - -# func_arith arithmetic-term... -func_arith () -{ - func_arith_result=`expr "$@"` -} - -# func_len string -# STRING may not start with a hyphen. -func_len () -{ - func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len` -} - -_LT_EOF -esac - -case $lt_shell_append in - yes) - cat << \_LT_EOF >> "$cfgfile" - -# func_append var value -# Append VALUE to the end of shell variable VAR. -func_append () -{ - eval "$1+=\$2" -} -_LT_EOF - ;; - *) - cat << \_LT_EOF >> "$cfgfile" - -# func_append var value -# Append VALUE to the end of shell variable VAR. 
-func_append () -{ - eval "$1=\$$1\$2" -} - -_LT_EOF - ;; - esac + sed '$q' "$ltmain" >> "$cfgfile" \ + || (rm -f "$cfgfile"; exit 1) + if test x"$xsi_shell" = xyes; then + sed -e '/^func_dirname ()$/,/^} # func_dirname /c\ +func_dirname ()\ +{\ +\ case ${1} in\ +\ */*) func_dirname_result="${1%/*}${2}" ;;\ +\ * ) func_dirname_result="${3}" ;;\ +\ esac\ +} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") +test 0 -eq $? || _lt_function_replace_fail=: + + + sed -e '/^func_basename ()$/,/^} # func_basename /c\ +func_basename ()\ +{\ +\ func_basename_result="${1##*/}"\ +} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") +test 0 -eq $? || _lt_function_replace_fail=: + + + sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\ +func_dirname_and_basename ()\ +{\ +\ case ${1} in\ +\ */*) func_dirname_result="${1%/*}${2}" ;;\ +\ * ) func_dirname_result="${3}" ;;\ +\ esac\ +\ func_basename_result="${1##*/}"\ +} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") +test 0 -eq $? 
|| _lt_function_replace_fail=: + + + sed -e '/^func_stripname ()$/,/^} # func_stripname /c\ +func_stripname ()\ +{\ +\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\ +\ # positional parameters, so assign one to ordinary parameter first.\ +\ func_stripname_result=${3}\ +\ func_stripname_result=${func_stripname_result#"${1}"}\ +\ func_stripname_result=${func_stripname_result%"${2}"}\ +} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") +test 0 -eq $? || _lt_function_replace_fail=: + + + sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\ +func_split_long_opt ()\ +{\ +\ func_split_long_opt_name=${1%%=*}\ +\ func_split_long_opt_arg=${1#*=}\ +} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") +test 0 -eq $? || _lt_function_replace_fail=: + + + sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\ +func_split_short_opt ()\ +{\ +\ func_split_short_opt_arg=${1#??}\ +\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\ +} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") +test 0 -eq $? || _lt_function_replace_fail=: + + + sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\ +func_lo2o ()\ +{\ +\ case ${1} in\ +\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\ +\ *) func_lo2o_result=${1} ;;\ +\ esac\ +} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") +test 0 -eq $? 
|| _lt_function_replace_fail=: + + + sed -e '/^func_xform ()$/,/^} # func_xform /c\ +func_xform ()\ +{\ + func_xform_result=${1%.*}.lo\ +} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") +test 0 -eq $? || _lt_function_replace_fail=: + + + sed -e '/^func_arith ()$/,/^} # func_arith /c\ +func_arith ()\ +{\ + func_arith_result=$(( $* ))\ +} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") +test 0 -eq $? || _lt_function_replace_fail=: + + + sed -e '/^func_len ()$/,/^} # func_len /c\ +func_len ()\ +{\ + func_len_result=${#1}\ +} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") +test 0 -eq $? || _lt_function_replace_fail=: + +fi + +if test x"$lt_shell_append" = xyes; then + sed -e '/^func_append ()$/,/^} # func_append /c\ +func_append ()\ +{\ + eval "${1}+=\\${2}"\ +} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") +test 0 -eq $? || _lt_function_replace_fail=: + + + sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\ +func_append_quoted ()\ +{\ +\ func_quote_for_eval "${2}"\ +\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\ +} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") +test 0 -eq $? 
|| _lt_function_replace_fail=: + + + # Save a `func_append' function call where possible by direct use of '+=' + sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") + test 0 -eq $? || _lt_function_replace_fail=: +else + # Save a `func_append' function call even when '+=' is not available + sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") + test 0 -eq $? || _lt_function_replace_fail=: +fi + +if test x"$_lt_function_replace_fail" = x":"; then + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5 +$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;} +fi - sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \ - || (rm -f "$cfgfile"; exit 1) - mv -f "$cfgfile" "$ofile" || + mv -f "$cfgfile" "$ofile" || (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") chmod +x "$ofile" @@ -21348,12 +21732,12 @@ # Compiler flag to turn off builtin functions. no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag_CXX -# How to pass a linker flag through the compiler. -wl=$lt_lt_prog_compiler_wl_CXX - # Additional compiler flags for building library objects. pic_flag=$lt_lt_prog_compiler_pic_CXX +# How to pass a linker flag through the compiler. +wl=$lt_lt_prog_compiler_wl_CXX + # Compiler flag to prevent dynamic linking. link_static_flag=$lt_lt_prog_compiler_static_CXX @@ -21440,9 +21824,6 @@ # Whether libtool must link a program against all its dependency libraries. link_all_deplibs=$link_all_deplibs_CXX -# Fix the shell variable \$srcfile for the compiler. -fix_srcfile_path=$lt_fix_srcfile_path_CXX - # Set to "yes" if exported symbols are required. 
always_export_symbols=$always_export_symbols_CXX @@ -21458,6 +21839,9 @@ # Commands necessary for linking programs (against libraries) with templates. prelink_cmds=$lt_prelink_cmds_CXX +# Commands necessary for finishing linking programs. +postlink_cmds=$lt_postlink_cmds_CXX + # Specify filename containing input files. file_list_spec=$lt_file_list_spec_CXX @@ -21487,11 +21871,13 @@ done # for ac_tag -{ (exit 0); exit 0; } +as_fn_exit 0 _ACEOF -chmod +x $CONFIG_STATUS ac_clean_files=$ac_clean_files_save +test $ac_write_fail = 0 || + as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5 + # configure is writing to config.log, and then calls config.status. # config.status does its own redirection, appending to config.log. @@ -21511,7 +21897,11 @@ exec 5>>config.log # Use ||, not &&, to avoid exiting from the if with $? = 1, which # would make configure fail if this is the last instruction. - $ac_cs_success || { (exit 1); exit 1; } + $ac_cs_success || as_fn_exit 1 +fi +if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then + { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5 +$as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;} fi @@ -21522,14 +21912,18 @@ # identifies this dastardly mix as a unix variant, and libtool kindly # passes incorrect flags and names through to the MS linker. The simplest fix # is to modify libtool via sed to remove those options. +# # As this is only done once at first configure, and subsequent config or source # changes may trigger a silent reversion to the non-functioning original. +# # Changes are; +# # 1. replace LIB$name with $name in libname_spec (e.g. libicu -> icu) to ensure # correct windows versions of .lib and .dlls are found or generated. # 2. remove incompatible \w-link\w from archive_cmds # 3. remove GNU-style directives to be passed through to the linker # 4. 
swap GNU-style shared library flags with MS -dll variant +# # This obscene hackery is tracked under COUCHDB-440 and COUCHDB-1197. if test x${IS_WINDOWS} = xTRUE; then diff -Nru couchdb-1.2.0/configure.ac couchdb-1.4.0~rc.1/configure.ac --- couchdb-1.2.0/configure.ac 2012-03-28 15:34:12.000000000 -0400 +++ couchdb-1.4.0~rc.1/configure.ac 2013-08-23 10:57:21.000000000 -0400 @@ -10,27 +10,29 @@ dnl License for the specific language governing permissions and limitations dnl under the License. -AC_INIT([LOCAL_PACKAGE_NAME], [LOCAL_VERSION], [LOCAL_BUG_URI], +AC_INIT( + [LOCAL_PACKAGE_NAME], + [LOCAL_VERSION], + [LOCAL_BUG_URI], [LOCAL_PACKAGE_TARNAME]) -AC_PREREQ([2.59]) +AC_PREREQ([2.63]) -AC_CONFIG_SRCDIR([CHANGES]) AC_CONFIG_AUX_DIR([build-aux]) AC_CONFIG_MACRO_DIR([m4]) -AM_CONFIG_HEADER([config.h]) +AC_CONFIG_HEADERS([config.h]) AC_CONFIG_HEADERS([src/snappy/google-snappy/config.h]) AM_INIT_AUTOMAKE([1.6.3 foreign]) -AC_GNU_SOURCE +AC_USE_SYSTEM_EXTENSIONS AC_ENABLE_SHARED AC_DISABLE_STATIC AC_PROG_CC LT_INIT([win32-dll]) -AC_PROG_LIBTOOL +LT_INIT AC_PROG_LN_S PKG_PROG_PKG_CONFIG @@ -42,38 +44,44 @@ AC_PROG_CXX AC_LANG([C++]) +AC_LINK_IFELSE([AC_LANG_PROGRAM([], [])],, + AC_MSG_ERROR([A C++ compiler is required.])) AC_C_BIGENDIAN AC_CHECK_HEADERS([stdint.h stddef.h sys/mman.h sys/resource.h]) AC_CHECK_FUNC([mmap]) AC_MSG_CHECKING([if the compiler supports __builtin_expect]) -AC_TRY_COMPILE(, [ +AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[]], [[ return __builtin_expect(1, 1) ? 
1 : 0 -], [ +]])],[ snappy_have_builtin_expect=yes AC_MSG_RESULT([yes]) -], [ +],[ snappy_have_builtin_expect=no AC_MSG_RESULT([no]) ]) + if test x$snappy_have_builtin_expect = xyes ; then - AC_DEFINE([HAVE_BUILTIN_EXPECT], [1], [Define to 1 if the compiler supports __builtin_expect.]) + AC_DEFINE([HAVE_BUILTIN_EXPECT], [1], + [Define to 1 if the compiler supports __builtin_expect.]) fi AC_MSG_CHECKING([if the compiler supports __builtin_ctzll]) -AC_TRY_COMPILE(, [ +AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[]], [[ return (__builtin_ctzll(0x100000000LL) == 32) ? 1 : 0 -], [ +]])],[ snappy_have_builtin_ctz=yes AC_MSG_RESULT([yes]) -], [ +],[ snappy_have_builtin_ctz=no AC_MSG_RESULT([no]) ]) + if test x$snappy_have_builtin_ctz = xyes ; then - AC_DEFINE([HAVE_BUILTIN_CTZ], [1], [Define to 1 if the compiler supports __builtin_ctz and friends.]) + AC_DEFINE([HAVE_BUILTIN_CTZ], [1], + [Define to 1 if the compiler supports __builtin_ctz and friends.]) fi if test "$ac_cv_header_stdint_h" = "yes"; then @@ -94,16 +102,18 @@ AC_SUBST([SNAPPY_MAJOR]) AC_SUBST([SNAPPY_MINOR]) AC_SUBST([SNAPPY_PATCHLEVEL]) -dnl End of google snappy specific config AC_MSG_CHECKING([for pthread_create in -lpthread]) original_LIBS="$LIBS" LIBS="-lpthread $original_LIBS" -AC_TRY_LINK([#include], - [pthread_create((void *)0, (void *)0, (void *)0, (void *)0)], - [pthread=yes], [pthread=no]) +AC_LINK_IFELSE([ + AC_LANG_PROGRAM( + [[#include]], + [[pthread_create((void *)0, (void *)0, (void *)0, (void *)0)]])], + [pthread=yes], + [pthread=no]) if test x${pthread} = xyes; then AC_MSG_RESULT([yes]) @@ -112,193 +122,174 @@ AC_MSG_RESULT([no]) fi -AC_ARG_VAR([ERLC_FLAGS], [general flags to prepend to ERLC_FLAGS]) -AC_ARG_VAR([FLAGS], [general flags to prepend to LDFLAGS and CPPFLAGS]) -AS_CASE([$(uname -s)], - [CYGWIN*], [] , - [*], [ - CPPFLAGS="$CPPFLAGS -I/opt/local/include" - CPPFLAGS="$CPPFLAGS -I/usr/local/include" - CPPFLAGS="$CPPFLAGS -I/usr/include" - LDFLAGS="$LDFLAGS -L/opt/local/lib" - 
LDFLAGS="$LDFLAGS -L/usr/local/lib" -]) -CPPFLAGS="$CPPFLAGS $FLAGS" -LDFLAGS="$LDFLAGS $FLAGS" - AC_PATH_PROG([ERL], [erl]) + AS_IF([test x${ERL} = x], [ AC_MSG_ERROR([Could not find the `erl' executable. Is Erlang installed?]) - ]) +]) -AC_ARG_WITH([erlang], [AC_HELP_STRING([--with-erlang=PATH], - [set PATH to the Erlang include directory])], [ +AC_ARG_WITH([erlang], + [AS_HELP_STRING([--with-erlang=PATH], + [set PATH to the Erlang include directory]) +], [ ERLANG_FLAGS="-I$withval" ], [ realerl=`readlink -f $ERL 2>/dev/null` AS_IF([test $? -eq 0], [ - erlbase=`dirname $realerl` - erlbase=`dirname $erlbase` - ERLANG_FLAGS="-I${erlbase}/usr/include" - ], [ - # Failed to figure out where erl is installed.. - # try to add some default directories to search - ERLANG_FLAGS="-I${libdir}/erlang/usr/include" - ERLANG_FLAGS="$ERLANG_FLAGS -I/usr/lib/erlang/usr/include" - ERLANG_FLAGS="$ERLANG_FLAGS -I/usr/local/lib/erlang/usr/include" - ERLANG_FLAGS="$ERLANG_FLAGS -I/opt/local/lib/erlang/usr/include" - ]) + erlbase=`dirname $realerl` + erlbase=`dirname $erlbase` + ERLANG_FLAGS="-I${erlbase}/usr/include" + ], [ + # Failed to figure out where erl is installed.. + # try to add some default directories to search + ERLANG_FLAGS="-I${libdir}/erlang/usr/include" + ERLANG_FLAGS="$ERLANG_FLAGS -I/usr/lib/erlang/usr/include" + ERLANG_FLAGS="$ERLANG_FLAGS -I/usr/local/lib/erlang/usr/include" + ERLANG_FLAGS="$ERLANG_FLAGS -I/opt/local/lib/erlang/usr/include" + ]) ]) + AC_SUBST(ERLANG_FLAGS) -PKG_CHECK_EXISTS([mozjs185], [ - PKG_CHECK_EXISTS([nspr], [], [ - AC_MSG_WARN([ - -You have the pkg-config file for mozjs185 isntalled but no pkg-config -file for NSPR. More than likely configure will fail. If it does it most -likely means you need to find on install the pkg-config file for NSPR. -This most commonly occurs on Mac OS X with older versions of Homebrew. - -You can correct this by removing SpiderMonkey and NSPR, updating -Homebrew and reinstalling. 
- -])])]) - -AC_ARG_WITH([js-lib], [AC_HELP_STRING([--with-js-lib=PATH], - [set PATH to the SpiderMonkey library directory])], - [ - JS_LIB_DIR=$withval -], [ - PKG_CHECK_MODULES([JS185], [mozjs185], [ +PKG_CHECK_MODULES([JS], [mozjs185], [ JS_LIB_DIR="$(${PKG_CONFIG} --variable=libdir mozjs185)" +], [ + PKG_CHECK_MODULES([JS], [mozilla-js >= 1.7], [ + JS_LIB_DIR="$(${PKG_CONFIG} --variable=sdkdir mozilla-js)/lib" ], [ - PKG_CHECK_MODULES([JS], [mozilla-js >= 1.7], [ - JS_LIB_DIR="$(${PKG_CONFIG} --variable=sdkdir mozilla-js)/lib" - ], [ - JS_LIB_DIR="${libdir}" - ]) + JS_LIB_DIR="${libdir}" + JS_CFLAGS="-I/usr/include" + JS_CFLAGS="$JS_CFLAGS -I/usr/include/js" + JS_CFLAGS="$JS_CFLAGS -I/usr/include/mozjs" + JS_CFLAGS="$JS_CFLAGS -I/usr/local/include/js" + JS_CFLAGS="$JS_CFLAGS -I/opt/local/include/js" ]) ]) -JS_LDFLAGS="-L$JS_LIB_DIR $LDFLAGS" - -AC_ARG_WITH([js-include], [AC_HELP_STRING([--with-js-include=PATH], - [set PATH to the SpiderMonkey include directory])], [ +AC_ARG_WITH([js-include], + [AS_HELP_STRING([--with-js-include=PATH], + [set PATH to the SpiderMonkey include directory]) +], [ JS_INCLUDE="$withval" JS_CFLAGS="-I$JS_INCLUDE" - JS_CFLAGS="$JS_CFLAGS -I$JS_INCLUDE/js" - JS_CFLAGS="$JS_CFLAGS -I$JS_INCLUDE/mozjs" +], []) + +AC_ARG_WITH([js-lib], + [AS_HELP_STRING([--with-js-lib=PATH], + [set PATH to the SpiderMonkey library directory]) ], [ - PKG_CHECK_MODULES([JS185], [mozjs185], [ - JS_CFLAGS="$(${PKG_CONFIG} --cflags mozjs185)" - ], [ - PKG_CHECK_MODULES([JS], [mozilla-js >= 1.7], [ - JS_CFLAGS="$(${PKG_CONFIG} --cflags mozilla-js)" - ], [ - JS_CFLAGS="-I/usr/include" - JS_CFLAGS="$JS_CFLAGS -I/usr/include/js" - JS_CFLAGS="$JS_CFLAGS -I/usr/include/mozjs" - JS_CFLAGS="$JS_CFLAGS -I/usr/local/include/js" - JS_CFLAGS="$JS_CFLAGS -I/opt/local/include/js" - ]) - ]) -]) + JS_LIB_DIR=$withval + JS_LIBS="-L$withval" +], []) use_js_trunk=no -AC_ARG_ENABLE([js-trunk], [AC_HELP_STRING([--enable-js-trunk], - [allow use of SpiderMonkey versions newer than 
js185-1.0.0])], [ + +AC_ARG_ENABLE([js-trunk], + [AS_HELP_STRING([--enable-js-trunk], + [allow use of SpiderMonkey versions newer than js185-1.0.0]) +], [ use_js_trunk=$enableval ], []) +AC_ARG_VAR([ERLC_FLAGS], [general flags to prepend to ERLC_FLAGS]) +AC_ARG_VAR([FLAGS], [general flags to prepend to LDFLAGS and CPPFLAGS]) + +AS_CASE([$(uname -s)], [CYGWIN*], [] , [*], [ + CPPFLAGS="$CPPFLAGS -I/opt/local/include" + CPPFLAGS="$CPPFLAGS -I/opt/local/include/js" + CPPFLAGS="$CPPFLAGS -I/usr/local/include" + CPPFLAGS="$CPPFLAGS -I/usr/local/include/js" + CPPFLAGS="$CPPFLAGS -I/usr/include" + CPPFLAGS="$CPPFLAGS -I/usr/include/js" + LDFLAGS="$LDFLAGS -L/opt/local/lib" + LDFLAGS="$LDFLAGS -L/usr/local/lib" +]) + +CPPFLAGS="$CPPFLAGS $FLAGS" +LDFLAGS="$LDFLAGS $FLAGS" + # The erlang cc.sh/ld.sh scripts will convert a -O option # into the same optimization flags erlang itself uses. CFLAGS="-O2 $CFLAGS" LIBS="$LIBS $LDFLAGS" -AS_CASE([$(uname -s)], - [CYGWIN*], [ - JS_CFLAGS="-DXP_WIN $JS_CFLAGS" - IS_WINDOWS="TRUE" - ] , - [*], [ - # XP_UNIX required for jsapi.h and has been tested to work on Linux and Darwin. - JS_CFLAGS="-DXP_UNIX $JS_CFLAGS" +AS_CASE([$(uname -s)], [CYGWIN*], [ + JS_CFLAGS="-DXP_WIN $JS_CFLAGS" + IS_WINDOWS="TRUE" +], [*], [ + # XP_UNIX required for jsapi.h, tested on Linux and Darwin. + JS_CFLAGS="-DXP_UNIX $JS_CFLAGS" ]) AM_CONDITIONAL([WINDOWS], [test x$IS_WINDOWS = xTRUE]) -OLD_CPPFLAGS="$CPPFLAGS" -OLD_LDFLAGS="$LDFLAGS" OLD_LIBS="$LIBS" -CPPFLAGS="$JS_CFLAGS $CPPFLAGS" -LDFLAGS="$JS_LDFLAGS" LIBS="$JS_LIBS $LIBS" +OLD_CPPFLAGS="$CPPFLAGS" +CPPFLAGS="$JS_CFLAGS $CPPFLAGS" + AC_CHECK_HEADER([jsapi.h], [], [ - AC_CHECK_HEADER([js/jsapi.h], - [ + AC_CHECK_HEADER([js/jsapi.h], [ CPPFLAGS="$CPPFLAGS -I$JS_INCLUDE/js" - ], - [ - AC_MSG_ERROR([Could not find the jsapi header. + ], [ + AC_MSG_ERROR([Could not find the jsapi header. 
Are the Mozilla SpiderMonkey headers installed?]) - ])]) - -AC_ARG_WITH([js-lib-name], [AC_HELP_STRING([--with-js-lib-name=NAME], - [set Spidermonkey library NAME])], [ - JS_LIB_BASE="$withval" - AC_CHECK_LIB([$JS_LIB_BASE], JS_NewObject, [], [ - AC_MSG_ERROR([Could not find the Spidermonkey library. - -Did you specify the correct library name?])]) - ], [ - AC_CHECK_LIB(mozjs, [JS_NewObject], [JS_LIB_BASE=mozjs], [ - AC_CHECK_LIB(js, [JS_NewObject], [JS_LIB_BASE=js], [ - AC_CHECK_LIB([js3250], [JS_NewObject], [JS_LIB_BASE=js3250], [ - AC_CHECK_LIB([js32], [JS_NewObject], [JS_LIB_BASE=js32], [ - AC_CHECK_LIB([mozjs185-1.0], [JS_NewObject], [JS_LIB_BASE=mozjs185-1.0], [ - AC_CHECK_LIB(mozjs185, [JS_NewObject], [JS_LIB_BASE=mozjs185], [ - AC_MSG_ERROR([Could not find the js library. + ]) +]) -Is the Mozilla SpiderMonkey library installed?])])])])])])]) +AC_CHECK_LIB([mozjs185], [JS_NewContext], [JS_LIB_BASE=mozjs185], [ + AC_CHECK_LIB([mozjs185-1.0], [JS_NewContext], [JS_LIB_BASE=mozjs185-1.0], [ + AC_CHECK_LIB([mozjs], [JS_NewContext], [JS_LIB_BASE=mozjs], [ + AC_CHECK_LIB([js], [JS_NewContext], [JS_LIB_BASE=js], [ + AC_CHECK_LIB([js3250], [JS_NewContext], [JS_LIB_BASE=js3250], [ + AC_CHECK_LIB([js32], [JS_NewContext], [JS_LIB_BASE=js32], [ + AC_MSG_ERROR([Could not find the js library. + +Is the Mozilla SpiderMonkey library installed?]) + ]) + ]) + ]) + ]) + ]) ]) # Figure out what version of SpiderMonkey to use AC_CHECK_LIB([$JS_LIB_BASE], [JS_NewCompartmentAndGlobalObject], # Prevent people from accidentally using SpiderMonkey's that are too new - if test "$use_js_trunk" = "no"; then AC_CHECK_DECL([JSOPTION_ANONFUNFIX], [], [ AC_MSG_ERROR([Your SpiderMonkey library is too new. -NOTE: Check above for an error about NSPR - Versions of SpiderMonkey after the js185-1.0.0 release remove the optional enforcement of preventing anonymous functions in a statement context. This will most likely break your existing JavaScript code as well as render all example code invalid. 
-If you wish to ignore this error pass --enable-js-trunk to ./configure.])], - [[#include ]]) +If you wish to ignore this error pass --enable-js-trunk to ./configure.]) + ], [ + [#include ] + ]) fi - AC_DEFINE([SM185], [1], - [Use SpiderMonkey 1.8.5])) + AC_DEFINE([SM185], [1], [Use SpiderMonkey 1.8.5]) +) AC_CHECK_LIB([$JS_LIB_BASE], [JS_ThrowStopIteration], AC_DEFINE([SM180], [1], - [Use SpiderMonkey 1.8.0])) + [Use SpiderMonkey 1.8.0]) +) AC_CHECK_LIB([$JS_LIB_BASE], [JS_GetStringCharsAndLength], AC_DEFINE([HAVE_JS_GET_STRING_CHARS_AND_LENGTH], [1], - [Use newer JS_GetCharsAndLength function.])) + [Use newer JS_GetCharsAndLength function.]) +) # Else, hope that 1.7.0 works -# Deal with JSScript -> JSObject -> JSScript switcheroo +# Deal with JSScript to JSObject to JSScript switcheroo AC_CHECK_TYPE([JSScript*], [AC_DEFINE([JSSCRIPT_TYPE], [JSScript*], [Use JSObject* for scripts])], @@ -309,9 +300,7 @@ AC_DEFINE([COUCHJS_NAME], ["couchjs"], ["CouchJS executable name."]) if test x${IS_WINDOWS} = xTRUE; then - AC_DEFINE([COUCHJS_NAME], ["couchjs.exe"], ["CouchJS executable name."]) - if test -f "$JS_LIB_DIR/$JS_LIB_BASE.dll"; then # seamonkey 1.7- build layout on Windows JS_LIB_BINARY="$JS_LIB_DIR/$JS_LIB_BASE.dll" @@ -324,54 +313,55 @@ fi fi AC_SUBST(JS_LIB_BINARY) - # On windows we need to know the path to the openssl binaries. - AC_ARG_WITH([openssl-bin-dir], [AC_HELP_STRING([--with-openssl-bin-dir=PATH], - [path to the open ssl binaries for distribution on Windows])], [ + AC_ARG_WITH([openssl-bin-dir], + [AS_HELP_STRING([--with-openssl-bin-dir=PATH], + [path to the open ssl binaries for distribution on Windows]) + ], [ openssl_bin_dir=`cygpath -m "$withval"` AC_SUBST(openssl_bin_dir) ], []) - # Windows uses Inno setup - look for its compiler. 
AC_PATH_PROG([INNO_COMPILER_EXECUTABLE], [iscc]) if test x${INNO_COMPILER_EXECUTABLE} = x; then AC_MSG_WARN([You will be unable to build the Windows installer.]) fi - # We need the msvc redistributables for this platform too # (in theory we could just install the assembly locally - but # there are at least 4 directories with binaries, meaning 4 copies; # so using the redist .exe means it ends up installed globally...) - AC_ARG_WITH([msvc-redist-dir], [AC_HELP_STRING([--with-msvc-redist-dir=PATH], - [path to the msvc redistributables for the Windows platform])], [ + AC_ARG_WITH([msvc-redist-dir], + [AS_HELP_STRING([--with-msvc-redist-dir=PATH], + [path to the msvc redistributables for the Windows platform]) + ], [ msvc_redist_dir=`cygpath -m "$withval"` msvc_redist_name="vcredist_x86.exe" AC_SUBST(msvc_redist_dir) AC_SUBST(msvc_redist_name) ], []) if test ! -f ${msvc_redist_dir}/${msvc_redist_name}; then - AC_MSG_WARN([The MSVC redistributable seems to be missing; expect the installer to fail.]) + AC_MSG_WARN([Installer may fail due to missing MSVC redistributable.]) fi fi -JS_CFLAGS="$CPPFLAGS" -JS_LDFLAGS="$LDFLAGS" -JS_LIBS="-l$JS_LIB_BASE -lm $LIBS" -CPPFLAGS="$OLD_CPPFLAGS" -LDFLAGS="$OLD_LDFLAGS" -LIBS="$OLD_LIBS" +JS_LIBS="-l$JS_LIB_BASE -lm $JS_LIBS" -AC_SUBST(JS_CFLAGS) -AC_SUBST(JS_LDFLAGS) AC_SUBST(JS_LIBS) -AC_ARG_WITH([win32-icu-binaries], [AC_HELP_STRING([--with-win32-icu-binaries=PATH], - [set PATH to the Win32 native ICU binaries directory])], [ +LIBS="$OLD_LIBS" +CPPFLAGS="$OLD_CPPFLAGS" + +%% auto detect "kegged" icu4c on Mac / Homebrew +PATH="/usr/local/opt/icu4c/bin:$PATH" +AC_ARG_WITH([win32-icu-binaries], + [AS_HELP_STRING([--with-win32-icu-binaries=PATH], + [set PATH to the Win32 native ICU binaries directory]) +], [ ICU_CPPFLAGS="-I$withval/include" ICU_LIBS="-L$withval/lib -licuuc -licudt -licuin" ICU_BIN=$withval/bin ], [ - AC_CHECK_ICU([3.4.1]) + AX_CHECK_ICU([3.4.1]) ICU_BIN= ]) @@ -381,16 +371,20 @@ AC_SUBST(ICU_BIN) use_curl=yes 
-AC_ARG_WITH([win32-curl], [AC_HELP_STRING([--with-win32-curl=PATH], - [set PATH to the Win32 native curl directory])], [ + +AC_ARG_WITH([win32-curl], + [AS_HELP_STRING([--with-win32-curl=PATH], + [set PATH to the Win32 native curl directory]) +], [ # default build on windows is a static lib, and that's what we want too CURL_CFLAGS="-I$withval/include -DCURL_STATICLIB" CURL_LIBS="-L$withval/lib -llibcurl -lWs2_32 -lkernel32 -luser32 -ladvapi32 -lWldap32" # OpenSSL libraries may be pulled in via libcurl if it was built with SSL # these are libeay32 ssleay32 instead of crypto ssl on unix ], [ - AC_CHECK_CURL([7.18.0], - [AC_DEFINE([HAVE_CURL], [1], ["Provide HTTP support to couchjs"])], [ + AX_LIB_CURL([7.18.0],[ + AC_DEFINE([HAVE_CURL], [1], ["Provide HTTP support to couchjs"]) + ],[ AC_MSG_WARN([You will be unable to run some JavaScript unit tests.]) use_curl=no CURL_LIBS= @@ -414,74 +408,116 @@ esac -erlang_version_error="The installed Erlang version is less than 5.6.5 (R12B05)." +{ $as_echo "$as_me:${as_lineno-$LINENO}: checking Erlang version compatibility" >&5 +$as_echo_n "checking Erlang version compatibility... " >&6; } +erlang_version_error="The installed Erlang version must be >= R14B (erts-5.8.1) and xR13B03]) AM_CONDITIONAL([USE_EJSON_COMPARE_NIF], [test x$otp_release \> xR14B03]) -has_crypto=`${ERL} -eval "case application:load(crypto) of ok -> ok; _ -> exit(no_crypto) end." -noshell -s init stop` +has_crypto=`\ + ${ERL} -eval "\ + case application:load(crypto) of ok -> ok; _ -> exit(no_crypto) end. \ + " -noshell -s init stop` if test -n "$has_crypto"; then - AC_MSG_ERROR([Could not find the Erlang crypto library. Has Erlang been compiled with OpenSSL support?]) + AC_MSG_ERROR([Could not find the Erlang crypto library. + +Has Erlang been compiled with OpenSSL support?]) fi AC_PATH_PROG([ERLC], [erlc]) if test x${ERLC} = x; then - AC_MSG_ERROR([Could not find the `erlc' executable. 
Is Erlang installed?]) + AC_MSG_ERROR([Could not find the `erlc' executable. + +Is Erlang installed?]) fi OLD_CPPFLAGS="$CPPFLAGS" CPPFLAGS="$ERLANG_FLAGS $CPPFLAGS" + AC_CHECK_HEADER([erl_driver.h], [], [ AC_MSG_ERROR([Could not find the `erl_driver.h' header. -Are the Erlang headers installed? Use the `--with-erlang' option to specify the -path to the Erlang include directory.])]) -CPPFLAGS="$OLD_CPPFLAGS" +Are the Erlang headers installed? -AC_PATH_PROG([HELP2MAN_EXECUTABLE], [help2man]) -if test x${HELP2MAN_EXECUTABLE} = x; then - AC_MSG_WARN([You will be unable to regenerate any man pages.]) -fi +Use the `--with-erlang' option to specify the Erlang include directory.]) +]) + +CPPFLAGS="$OLD_CPPFLAGS" use_init=yes use_launchd=yes native_mochijson_enabled=no - -AC_ARG_ENABLE([init], [AC_HELP_STRING([--disable-init], - [don't install init script where applicable])], [ +tests_enabled=yes +docs_enabled=yes +strictness_enabled=no + +AC_ARG_ENABLE([init], + [AS_HELP_STRING([--disable-init], + [don't install init script where applicable]) +], [ use_init=$enableval ], []) -AC_ARG_ENABLE([launchd], [AC_HELP_STRING([--disable-launchd], - [don't install launchd configuration where applicable])], [ +AC_ARG_ENABLE([launchd], + [AS_HELP_STRING([--disable-launchd], + [don't install launchd configuration where applicable]) +], [ use_launchd=$enableval ], []) -AC_ARG_ENABLE([native-mochijson], [AC_HELP_STRING([--enable-native-mochijson], - [compile mochijson to native code (EXPERIMENTAL)])], [ +AC_ARG_ENABLE([native-mochijson], + [AS_HELP_STRING([--enable-native-mochijson], + [compile mochijson to native code (EXPERIMENTAL)]) +], [ native_mochijson_enabled=$enableval ], []) +AC_ARG_ENABLE([tests], + [AS_HELP_STRING([--disable-tests], + [skip tests during build]) +], [ + tests_enabled=$enableval +], []) + +AC_ARG_ENABLE([docs], + [AS_HELP_STRING([--disable-docs], + [skip docs during build]) +], [ + docs_enabled=$enableval +], []) + +AC_ARG_ENABLE([strictness], + 
[AS_HELP_STRING([--enable-strictness], + [exit when optional checks fail]) +], [ + strictness_enabled=$enableval +], []) + init_enabled=false launchd_enabled=false @@ -514,26 +550,122 @@ fi fi -AC_ARG_VAR([ERL], [path to the `erl' executable]) -AC_ARG_VAR([ERLC], [path to the `erlc' executable]) -AC_ARG_VAR([HELP2MAN_EXECUTABLE], [path to the `help2man' program]) +AC_CHECK_PROG([HAS_HELP2MAN], [help2man], [yes]) -if test -n "$HELP2MAN_EXECUTABLE"; then - help2man_enabled=true +if test x${HAS_HELP2MAN} = x; then + if test x${strictness_enabled} = xyes; then + AC_MSG_ERROR([Could not find the `help2man' executable.]) + else + AC_MSG_WARN([You will be unable to regenerate man pages.]) + fi +fi + +AC_CHECK_PROG([HAS_PDFLATEX], [pdflatex], [yes]) + +if test x${HAS_PDFLATEX} = x; then + if test x${strictness_enabled} = xyes; then + AC_MSG_ERROR([Could not find the `pdflatex' executable. + +Is LaTeX installed?]) + else + AC_MSG_WARN([You will be unable to regenerate PDF documentation.]) + fi +fi + +AC_CHECK_PROG([HAS_MAKEINFO], [makeinfo], [yes]) + +if test x${HAS_MAKEINFO} = x; then + if test x${strictness_enabled} = xyes; then + AC_MSG_ERROR([Could not find the `makeinfo' executable. + +Is GNU Texinfo installed?]) + else + AC_MSG_WARN([You will be unable to regenerate info documentation.]) + fi +fi + +AC_CHECK_PROG([HAS_INSTALLINFO], [install-info], [yes]) + +if test x${HAS_INSTALLINFO} = x; then + if test x${strictness_enabled} = xyes; then + AC_MSG_ERROR([Could not find the `install-info' executable. 
+ +Is GNU Texinfo installed?]) + else + AC_MSG_WARN([You will be unable to install info documentation.]) + fi +fi + +AC_CHECK_PROG([HAS_SPHINX_BUILD], [sphinx-build], [yes]) + +if test x${HAS_SPHINX_BUILD} = x; then + if test x${strictness_enabled} = xyes; then + AC_MSG_ERROR([Could not find the `sphinx-build' executable.]) + else + AC_MSG_WARN([You will be unable to regenerate documentation.]) + fi +fi + +if test x${docs_enabled} = xyes; then + if test x${HAS_HELP2MAN} = xyes; then + build_man=yes + fi + if test x${HAS_SPHINX_BUILD} = xyes; then + build_html=yes + if test x${HAS_MAKEINFO} = xyes; then + build_info=yes + fi + if test x${HAS_PDFLATEX} = xyes; then + build_pdf=yes + fi + fi +fi + +if test x${strictness_enabled} = xyes; then + AX_PYTHON_MODULE([pygments], [fatal]) else - if test -f "$srcdir/bin/couchdb.1" -a -f "$srcdir/src/couchdb/priv/couchjs.1"; then - help2man_enabled=true + AX_PYTHON_MODULE([pygments]) +fi + +AC_MSG_CHECKING(pygments version >= 1.5) + +python 2> /dev/null << EOF +import sys +import pygments + +if float(pygments.__version__) >= 1.5: + sys.exit(0) +else: + sys.exit(1) +EOF + +if test $? 
-eq 0; then + AC_MSG_RESULT(yes) +else + AC_MSG_RESULT(no) + if test x${strictness_enabled} = xyes; then + AC_MSG_ERROR([Your copy of pygments is out of date.], 1) else - help2man_enabled=false + AC_MSG_WARN([Syntax highlighting may not work.]) fi fi +AC_ARG_VAR([ERL], [path to the `erl' executable]) +AC_ARG_VAR([ERLC], [path to the `erlc' executable]) + AM_CONDITIONAL([INIT], [test x${init_enabled} = xtrue]) AM_CONDITIONAL([LAUNCHD], [test x${launchd_enabled} = xtrue]) -AM_CONDITIONAL([HELP2MAN], [test x${help2man_enabled} = xtrue]) AM_CONDITIONAL([USE_NATIVE_MOCHIJSON], [test x${native_mochijson_enabled} = xyes]) AM_CONDITIONAL([USE_CURL], [test x${use_curl} = xyes]) +AM_CONDITIONAL([BUILD_MAN], [test x${build_man} = xyes]) +AM_CONDITIONAL([BUILD_INFO], [test x${build_info} = xyes]) +AM_CONDITIONAL([BUILD_PDF], [test x${build_pdf} = xyes]) +AM_CONDITIONAL([BUILD_HTML], [test x${build_html} = xyes]) + +AM_CONDITIONAL([TESTS], [test x${tests_enabled} = xyes]) +AM_CONDITIONAL([STRICTNESS], [test x${strictness_enabled} = xyes]) + AC_SUBST([package_author_name], ["LOCAL_PACKAGE_AUTHOR_NAME"]) AC_SUBST([package_author_address], ["LOCAL_PACKAGE_AUTHOR_ADDRESS"]) AC_SUBST([package_identifier], ["LOCAL_PACKAGE_IDENTIFIER"]) @@ -589,7 +721,13 @@ AC_CONFIG_FILES([etc/windows/Makefile]) AC_CONFIG_FILES([etc/Makefile]) AC_CONFIG_FILES([share/Makefile]) +AC_CONFIG_FILES([share/doc/Makefile]) +AC_CONFIG_FILES([share/doc/build/Makefile]) AC_CONFIG_FILES([src/Makefile]) +AC_CONFIG_FILES([src/couch_dbupdates/Makefile]) +AC_CONFIG_FILES([src/couch_index/Makefile]) +AC_CONFIG_FILES([src/couch_mrview/Makefile]) +AC_CONFIG_FILES([src/couch_replicator/Makefile]) AC_CONFIG_FILES([src/couchdb/couch.app.tpl]) AC_CONFIG_FILES([src/couchdb/Makefile]) AC_CONFIG_FILES([src/couchdb/priv/Makefile]) @@ -618,14 +756,18 @@ # identifies this dastardly mix as a unix variant, and libtool kindly # passes incorrect flags and names through to the MS linker. 
The simplest fix # is to modify libtool via sed to remove those options. +# # As this is only done once at first configure, and subsequent config or source # changes may trigger a silent reversion to the non-functioning original. +# # Changes are; +# # 1. replace LIB$name with $name in libname_spec (e.g. libicu -> icu) to ensure # correct windows versions of .lib and .dlls are found or generated. # 2. remove incompatible \w-link\w from archive_cmds # 3. remove GNU-style directives to be passed through to the linker # 4. swap GNU-style shared library flags with MS -dll variant +# # This obscene hackery is tracked under COUCHDB-440 and COUCHDB-1197. if test x${IS_WINDOWS} = xTRUE; then diff -Nru couchdb-1.2.0/debian/changelog couchdb-1.4.0~rc.1/debian/changelog --- couchdb-1.2.0/debian/changelog 2013-01-22 14:18:34.000000000 -0500 +++ couchdb-1.4.0~rc.1/debian/changelog 2013-08-28 13:53:47.000000000 -0400 @@ -1,3 +1,41 @@ +couchdb (1.4.0~rc.1-0ubuntu1) saucy; urgency=low + + * New upstream release (LP: #1022515) + * Switch from CDBS to pure debhelper (compat 9) + * Bump Standards-Version to 3.9.4 + * Use an Upstart job instead of the upstream SysV init.d script + * Remove Build-Depends: cdbs, libreadline-dev + * Add Build-Depends: erlang-os-mon, erlang-syntax-tools, python-sphinx, + texlive-latex-base, texlive-latex-recommended, texlive-latex-extra, + texlive-fonts-recommended, texinfo + * Remove couchdb-bin Depends: procps, lsb-base (needed for SysV init.d script) + * Remove couchdb-bin Depends: libjs-jquery (1.7.2 is in Saucy, but the + internal CouchDB jquery is now at version 1.8.3) + * Simplify Erlang couchdb-bin Depends to just ${erlang-abi:Depends}, ${erlang:Depends} + * Add couchdb Depends: upstart + * Remove deprecated couchdb-bin.postinst, couchdb-bin.postrm + * Thanks to the Upstart job, couchdb.postrm no longer needs `sleep 3` hack, + nor needs to `rm -r -f "/var/run/couchdb"` + * Stop using versioned database_dir /var/lib/couchdb/VERSION as this isn't + 
done upstream and CouchDB is no longer considered alpha software + * Remove README.Debian, README.source as they're no longer applicable + * Drop patches superseded upstream for CVE-2012-5649, CVE-2012-5650: + - improve_parsing_of_mochiweb_relative_paths.patch + - improve_script_url_validation.patch + - include_a_comment_before_jsonp_output.patch + * Because of the switch to Upstart, drop unneeded SysV init.d script patches: + - force-reload.patch + - couchdb_own_rundir.patch + - wait_for_couchdb_stop.patch + * Drop couchdb_sighup.patch, superseded upstream + * Drop logrotate_as_couchdb.patch as it doesn't make sense for the CouchDB + daemon to be able to modify its own archived log files + * Move static data and docs in "/usr/share/couchdb" from `couchdb-bin` into + new `couchdb-common` Architecture:all package + * Add couchdb-bin Depends: couchdb-common (= ${source:Version}) + + -- Jason Gerard DeRose Mon, 26 Aug 2013 13:47:29 -0600 + couchdb (1.2.0-5ubuntu1) raring; urgency=low * Merge from Debian unstable. 
Remaining changes: diff -Nru couchdb-1.2.0/debian/compat couchdb-1.4.0~rc.1/debian/compat --- couchdb-1.2.0/debian/compat 2013-01-09 05:52:20.000000000 -0500 +++ couchdb-1.4.0~rc.1/debian/compat 2013-08-28 13:53:47.000000000 -0400 @@ -1 +1 @@ -5 +9 diff -Nru couchdb-1.2.0/debian/control couchdb-1.4.0~rc.1/debian/control --- couchdb-1.2.0/debian/control 2013-01-09 05:52:20.000000000 -0500 +++ couchdb-1.4.0~rc.1/debian/control 2013-08-28 13:53:47.000000000 -0400 @@ -2,36 +2,53 @@ Section: misc Priority: optional Maintainer: Ubuntu Developers -XSBC-Original-Maintainer: Laszlo Boszormenyi (GCS) -Standards-Version: 3.9.3 -Build-Depends: cdbs (>= 0.4.42), - debhelper (>= 7.2.11), +XSBC-Original-Maintainer: Jason Gerard DeRose +Standards-Version: 3.9.4 +Build-Depends: debhelper (>= 9), + erlang-dev (>= 1:14.b.4), erlang-crypto, - erlang-dev (>= 1:13.b.2.1), erlang-eunit, erlang-inets, erlang-xmerl, + erlang-os-mon, + erlang-syntax-tools, help2man, libcurl4-openssl-dev, libicu-dev, libmozjs185-dev, - libreadline-dev + python-sphinx (>= 1.1.3), + texlive-latex-base, + texlive-latex-recommended, + texlive-latex-extra, + texlive-fonts-recommended, + texinfo, Homepage: http://couchdb.apache.org/ +Package: couchdb-common +Architecture: all +Depends: ${misc:Depends} +Replaces: couchdb-bin (<= 1.2.0-5ubuntu1) +Conflicts: couchdb-bin (<= 1.2.0-5ubuntu1) +Description: RESTful document oriented database - common data + Apache CouchDB is a distributed, fault-tolerant and schema-free + document-oriented database accessible via a RESTful HTTP/JSON API. Among other + features, it provides robust, incremental replication with bi-directional + conflict detection and resolution, and is queryable and indexable using a + table-oriented view engine with JavaScript acting as the default view + definition language. + . + CouchDB is written in Erlang, but can be easily accessed from any environment + that provides means to make HTTP requests. 
There are a multitude of third-party + client libraries that make this even easier for a variety of programming + languages and environments. + Package: couchdb-bin Architecture: any -Depends: erlang-base-hipe | erlang-base | ${erlang-abi:Depends}, - libjs-jquery (>= 1.4.2), - libjs-jquery-form (>= 2.36), - lsb-base, - procps, - ${erlang-crypto:Depends}, - ${erlang-inets:Depends}, - ${erlang-xmerl:Depends}, +Depends: couchdb-common (= ${source:Version}), + ${erlang-abi:Depends}, ${erlang:Depends}, + ${shlibs:Depends}, ${misc:Depends}, - ${shlibs:Depends} -Suggests: couchdb Description: RESTful document oriented database Apache CouchDB is a distributed, fault-tolerant and schema-free document-oriented database accessible via a RESTful HTTP/JSON API. Among other @@ -47,9 +64,12 @@ Package: couchdb Architecture: all -Depends: couchdb-bin (>= ${binary:Version}), ${misc:Depends}, adduser +Depends: couchdb-bin (>= ${source:Version}), + adduser, + upstart, + ${misc:Depends}, Replaces: couchdb-bin (<= 1.0.1-0ubuntu18) -Description: RESTful document oriented database (system-wide instance) +Description: RESTful document oriented database - system-wide instance Apache CouchDB is a distributed, fault-tolerant and schema-free document-oriented database accessible via a RESTful HTTP/JSON API. Among other features, it provides robust, incremental replication with bi-directional @@ -62,5 +82,5 @@ client libraries that make this even easier for a variety of programming languages and environments. . - This package adds the /etc/init.d/couchdb script and other items needed for a - system-wide CouchDB instance that is started at boot. + This package adds the Upstart job and other items needed for a system-wide + CouchDB instance that is started at boot. 
diff -Nru couchdb-1.2.0/debian/couchdb-bin.install couchdb-1.4.0~rc.1/debian/couchdb-bin.install --- couchdb-1.2.0/debian/couchdb-bin.install 2013-01-09 05:52:20.000000000 -0500 +++ couchdb-1.4.0~rc.1/debian/couchdb-bin.install 2013-08-28 13:53:47.000000000 -0400 @@ -1,3 +1,6 @@ -debian/tmp/usr +debian/tmp/usr/bin +debian/tmp/usr/lib +debian/tmp/usr/share/man +debian/tmp/usr/share/info debian/tmp/etc/couchdb/default.ini debian/tmp/etc/couchdb/default.d diff -Nru couchdb-1.2.0/debian/couchdb-bin.postinst couchdb-1.4.0~rc.1/debian/couchdb-bin.postinst --- couchdb-1.2.0/debian/couchdb-bin.postinst 2013-01-09 05:52:20.000000000 -0500 +++ couchdb-1.4.0~rc.1/debian/couchdb-bin.postinst 1969-12-31 19:00:00.000000000 -0500 @@ -1,17 +0,0 @@ -#!/bin/sh -e - -case $1 in - configure) - if dpkg --compare-versions "$2" lt-nl 1.2.0-2ubuntu1; then - # Hack to make the upgrade from the 1.0.1 Ubuntu package more correct: - chown root:root /etc/couchdb - chown root:root /etc/couchdb/default.ini - chown -R root:root /etc/couchdb/default.d - chmod 755 /etc/couchdb - chmod 644 /etc/couchdb/default.ini - chmod 755 /etc/couchdb/default.d - fi - ;; -esac - -#DEBHELPER# diff -Nru couchdb-1.2.0/debian/couchdb-bin.postrm couchdb-1.4.0~rc.1/debian/couchdb-bin.postrm --- couchdb-1.2.0/debian/couchdb-bin.postrm 2013-01-09 05:52:20.000000000 -0500 +++ couchdb-1.4.0~rc.1/debian/couchdb-bin.postrm 1969-12-31 19:00:00.000000000 -0500 @@ -1,16 +0,0 @@ -#!/bin/sh -e - -# Copyright 2009, Noah Slater - -# Copying and distribution of this file, with or without modification, are -# permitted in any medium without royalty provided the copyright notice and this -# notice are preserved. 
- -case $1 in - purge) - if test -d "/etc/couchdb"; then - rmdir --ignore-fail-on-non-empty "/etc/couchdb" || true - fi -esac - -#DEBHELPER# diff -Nru couchdb-1.2.0/debian/couchdb-common.install couchdb-1.4.0~rc.1/debian/couchdb-common.install --- couchdb-1.2.0/debian/couchdb-common.install 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/debian/couchdb-common.install 2013-08-28 13:53:47.000000000 -0400 @@ -0,0 +1,3 @@ +debian/tmp/usr/share/couchdb +debian/tmp/usr/share/doc/couchdb/AUTHORS.gz usr/share/doc/couchdb-common +debian/tmp/usr/share/doc/couchdb/THANKS.gz usr/share/doc/couchdb-common diff -Nru couchdb-1.2.0/debian/couchdb.install couchdb-1.4.0~rc.1/debian/couchdb.install --- couchdb-1.2.0/debian/couchdb.install 2013-01-09 05:52:20.000000000 -0500 +++ couchdb-1.4.0~rc.1/debian/couchdb.install 2013-08-28 13:53:47.000000000 -0400 @@ -1,5 +1,3 @@ -debian/tmp/etc/default -debian/tmp/etc/init.d debian/tmp/etc/couchdb/local.ini debian/tmp/etc/couchdb/local.d debian/tmp/etc/logrotate.d diff -Nru couchdb-1.2.0/debian/couchdb.lintian-overrides couchdb-1.4.0~rc.1/debian/couchdb.lintian-overrides --- couchdb-1.2.0/debian/couchdb.lintian-overrides 2013-01-09 05:52:20.000000000 -0500 +++ couchdb-1.4.0~rc.1/debian/couchdb.lintian-overrides 2013-08-28 13:53:47.000000000 -0400 @@ -1,3 +1,4 @@ -non-standard-file-perm etc/couchdb/local.ini 0660 != 0644 +non-standard-file-perm etc/couchdb/local.ini 0640 != 0644 +non-standard-file-perm etc/couchdb/local.ini.d 0750 != 0755 non-standard-dir-perm var/lib/couchdb/ 0750 != 0755 non-standard-dir-perm var/log/couchdb/ 0750 != 0755 diff -Nru couchdb-1.2.0/debian/couchdb.postinst couchdb-1.4.0~rc.1/debian/couchdb.postinst --- couchdb-1.2.0/debian/couchdb.postinst 2013-01-09 05:52:20.000000000 -0500 +++ couchdb-1.4.0~rc.1/debian/couchdb.postinst 2013-08-28 13:53:47.000000000 -0400 @@ -21,6 +21,12 @@ echo "The couchdb administrative group must not be root." 
>&2 false fi + # These should not be world readable: + chmod 0640 /etc/couchdb/local.ini + chmod 0750 /etc/couchdb/local.d + chmod 0750 /var/lib/couchdb + chmod 0750 /var/log/couchdb + # And should be owned by the couchdb user and group: chown couchdb:couchdb /etc/couchdb/local.ini chown -R couchdb:couchdb /etc/couchdb/local.d chown -R couchdb:couchdb /var/lib/couchdb diff -Nru couchdb-1.2.0/debian/couchdb.postrm couchdb-1.4.0~rc.1/debian/couchdb.postrm --- couchdb-1.2.0/debian/couchdb.postrm 2013-01-22 14:59:11.000000000 -0500 +++ couchdb-1.4.0~rc.1/debian/couchdb.postrm 2013-08-28 13:53:47.000000000 -0400 @@ -10,20 +10,15 @@ case $1 in purge) - sleep 3 # Time for couchdb to actually terminate, so we can deluser if test -d "/etc/couchdb/local.d"; then rm -r -f "/etc/couchdb/local.d" fi - if test -d "/var/lib/couchdb"; then rm -r -f "/var/lib/couchdb" fi if test -d "/var/log/couchdb"; then rm -r -f "/var/log/couchdb" fi - if test -d "/var/run/couchdb"; then - rm -r -f "/var/run/couchdb" - fi ;; esac diff -Nru couchdb-1.2.0/debian/couchdb.upstart couchdb-1.4.0~rc.1/debian/couchdb.upstart --- couchdb-1.2.0/debian/couchdb.upstart 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/debian/couchdb.upstart 2013-08-28 13:53:47.000000000 -0400 @@ -0,0 +1,19 @@ +# couchdb - a RESTful document oriented database + +description "Start the system-wide CouchDB instance" + +start on runlevel [2345] +stop on runlevel [016] +respawn + +pre-start script + mkdir -p /var/run/couchdb + chown couchdb:couchdb /var/run/couchdb +end script + +exec su couchdb -c /usr/bin/couchdb + +post-stop script + rm -rf /var/run/couchdb +end script + diff -Nru couchdb-1.2.0/debian/patches/couchdb_own_rundir.patch couchdb-1.4.0~rc.1/debian/patches/couchdb_own_rundir.patch --- couchdb-1.2.0/debian/patches/couchdb_own_rundir.patch 2013-01-09 05:52:20.000000000 -0500 +++ couchdb-1.4.0~rc.1/debian/patches/couchdb_own_rundir.patch 1969-12-31 19:00:00.000000000 -0500 @@ -1,20 +0,0 @@ -Description: 
Initscript creates RUN_DIR , make sure it's owned by couchdb - Use install to make COUCHDB_USER own the RUN_DIR being created. -Author: Laszlo Boszormenyi (GCS) -Bug-Debian: http://bugs.debian.org/681549 -Last-Update: 2012-11-18 - ---- - ---- couchdb-1.2.0.orig/etc/init/couchdb.tpl.in -+++ couchdb-1.2.0/etc/init/couchdb.tpl.in -@@ -83,7 +83,8 @@ run_command () { - start_couchdb () { - # Start Apache CouchDB as a background process. - -- mkdir -p "$RUN_DIR" -+ test -e "$RUN_DIR" || \ -+ install -m 755 -o "$COUCHDB_USER" -g "$COUCHDB_USER" -d "$RUN_DIR" - command="$COUCHDB -b" - if test -n "$COUCHDB_STDOUT_FILE"; then - command="$command -o $COUCHDB_STDOUT_FILE" diff -Nru couchdb-1.2.0/debian/patches/couchdb_sighup.patch couchdb-1.4.0~rc.1/debian/patches/couchdb_sighup.patch --- couchdb-1.2.0/debian/patches/couchdb_sighup.patch 2013-01-09 05:52:20.000000000 -0500 +++ couchdb-1.4.0~rc.1/debian/patches/couchdb_sighup.patch 1969-12-31 19:00:00.000000000 -0500 @@ -1,22 +0,0 @@ -Description: Use SIGTERM instead of SIGHUP for graceful shutdown - The SIGHUP couchdb uses by default for a graceful shutdown does not - work reliably in Debian as apt, when installing the package, SigIgn - masks SIGHUP for all its child processes. This is intentional and hard - to work around, and CouchDB does a graceful shutdown with SIGTERM as - well so there is no reason to use SIGHUP. -Author: Dominik George -Bug-Debian: http://bugs.debian.org/692295 - ---- - ---- couchdb-1.2.0.orig/bin/couchdb.tpl.in -+++ couchdb-1.2.0/bin/couchdb.tpl.in -@@ -272,7 +272,7 @@ stop_couchdb () { - echo > $PID_FILE - fi - if kill -0 $PID 2> /dev/null; then -- if kill -1 $PID 2> /dev/null; then -+ if kill -15 $PID 2> /dev/null; then - if test "$1" = "false"; then - echo "Apache CouchDB has been shutdown." 
- else diff -Nru couchdb-1.2.0/debian/patches/force-reload.patch couchdb-1.4.0~rc.1/debian/patches/force-reload.patch --- couchdb-1.2.0/debian/patches/force-reload.patch 2013-01-09 05:52:20.000000000 -0500 +++ couchdb-1.4.0~rc.1/debian/patches/force-reload.patch 1969-12-31 19:00:00.000000000 -0500 @@ -1,21 +0,0 @@ -diff -Nur couchdb-1.0.2.orig//etc/init/couchdb.tpl.in couchdb-1.0.2/etc/init/couchdb.tpl.in ---- couchdb-1.0.2.orig//etc/init/couchdb.tpl.in 2010-03-13 13:58:07.000000000 +0100 -+++ couchdb-1.0.2/etc/init/couchdb.tpl.in 2011-06-05 05:37:19.056697586 +0200 -@@ -129,7 +129,7 @@ - log_end_msg $SCRIPT_ERROR - fi - ;; -- restart) -+ restart|force-reload) - log_daemon_msg "Restarting $DESCRIPTION" $NAME - if stop_couchdb; then - if start_couchdb; then -@@ -146,7 +146,7 @@ - ;; - *) - cat << EOF >&2 --Usage: $SCRIPT_NAME {start|stop|restart|status} -+Usage: $SCRIPT_NAME {start|stop|restart|force-reload|status} - EOF - exit $SCRIPT_ERROR - ;; diff -Nru couchdb-1.2.0/debian/patches/improve_parsing_of_mochiweb_relative_paths.patch couchdb-1.4.0~rc.1/debian/patches/improve_parsing_of_mochiweb_relative_paths.patch --- couchdb-1.2.0/debian/patches/improve_parsing_of_mochiweb_relative_paths.patch 2013-01-22 14:18:34.000000000 -0500 +++ couchdb-1.4.0~rc.1/debian/patches/improve_parsing_of_mochiweb_relative_paths.patch 1969-12-31 19:00:00.000000000 -0500 @@ -1,40 +0,0 @@ -Description: improve parsing of mochiweb relative paths - Patch adapted from http://www.couchbase.com/issues/browse/MB-7390 -Author: Sriram Melkote -Bug-Debian: http://bugs.debian.org/698439 -Last-Update: 2012-12-15 - ---- - -diff --git a/src/mochiweb/mochiweb_util.erl b/src/mochiweb/mochiweb_util.erl -index 3b50fe7..6b88818 100644 ---- a/src/mochiweb/mochiweb_util.erl -+++ b/src/mochiweb/mochiweb_util.erl -@@ -68,11 +68,17 @@ partition2(_S, _Sep) -> - %% @spec safe_relative_path(string()) -> string() | undefined - %% @doc Return the reduced version of a relative path or undefined if it - %% is not 
safe. safe relative paths can be joined with an absolute path --%% and will result in a subdirectory of the absolute path. -+%% and will result in a subdirectory of the absolute path. Safe paths -+%% never contain a backslash character. - safe_relative_path("/" ++ _) -> - undefined; - safe_relative_path(P) -> -- safe_relative_path(P, []). -+ case string:chr(P, $\\) of -+ 0 -> -+ safe_relative_path(P, []); -+ _ -> -+ undefined -+ end. - - safe_relative_path("", Acc) -> - case Acc of -@@ -809,6 +815,7 @@ safe_relative_path_test() -> - undefined = safe_relative_path("../foo"), - undefined = safe_relative_path("foo/../.."), - undefined = safe_relative_path("foo//"), -+ undefined = safe_relative_path("foo\\bar"), - ok. - - parse_qvalues_test() -> diff -Nru couchdb-1.2.0/debian/patches/improve_script_url_validation.patch couchdb-1.4.0~rc.1/debian/patches/improve_script_url_validation.patch --- couchdb-1.2.0/debian/patches/improve_script_url_validation.patch 2013-01-22 14:18:34.000000000 -0500 +++ couchdb-1.4.0~rc.1/debian/patches/improve_script_url_validation.patch 1969-12-31 19:00:00.000000000 -0500 @@ -1,26 +0,0 @@ -Description: Improve script url validation -Author: Robert Newson -Bug-Debian: http://bugs.debian.org/698439 -Last-Update: 2012-12-18 - ---- - -diff --git a/share/www/script/couch_test_runner.js b/share/www/script/couch_test_runner.js -index c1e7a72..60ba11c 100644 ---- a/share/www/script/couch_test_runner.js -+++ b/share/www/script/couch_test_runner.js -@@ -15,11 +15,9 @@ - - function loadScript(url) { - // disallow loading remote URLs -- if((url.substr(0, 7) == "http://") -- || (url.substr(0, 2) == "//") -- || (url.substr(0, 5) == "data:") -- || (url.substr(0, 11) == "javascript:")) { -- throw "Not loading remote test scripts"; -+ var re = /^[a-z0-9_]+(\/[a-z0-9_]+)*\.js#?$/; -+ if (!re.test(url)) { -+ throw "Not loading remote test scripts"; - } - if (typeof document != "undefined") document.write(''); - }; diff -Nru 
couchdb-1.2.0/debian/patches/include_a_comment_before_jsonp_output.patch couchdb-1.4.0~rc.1/debian/patches/include_a_comment_before_jsonp_output.patch --- couchdb-1.2.0/debian/patches/include_a_comment_before_jsonp_output.patch 2013-01-22 14:18:34.000000000 -0500 +++ couchdb-1.4.0~rc.1/debian/patches/include_a_comment_before_jsonp_output.patch 1969-12-31 19:00:00.000000000 -0500 @@ -1,20 +0,0 @@ -Description: Include a comment before jsonp output -Author: Robert Newson -Bug-Debian: http://bugs.debian.org/698439 -Last-Update: 2012-12-19 - ---- - -diff --git a/src/couchdb/couch_httpd.erl b/src/couchdb/couch_httpd.erl -index 0be7126..58f5ec6 100644 ---- a/src/couchdb/couch_httpd.erl -+++ b/src/couchdb/couch_httpd.erl -@@ -746,7 +746,7 @@ start_jsonp() -> - case get(jsonp) of - no_jsonp -> []; - [] -> []; -- CallBack -> CallBack ++ "(" -+ CallBack -> ["/* CouchDB */", CallBack, "("] - end. - - end_jsonp() -> diff -Nru couchdb-1.2.0/debian/patches/logrotate_as_couchdb.patch couchdb-1.4.0~rc.1/debian/patches/logrotate_as_couchdb.patch --- couchdb-1.2.0/debian/patches/logrotate_as_couchdb.patch 2013-01-09 05:52:20.000000000 -0500 +++ couchdb-1.4.0~rc.1/debian/patches/logrotate_as_couchdb.patch 1969-12-31 19:00:00.000000000 -0500 @@ -1,16 +0,0 @@ -Description: Use logrotate as couchdb user - Use su and create to make logfiles owned by couchdb -Author: Laszlo Boszormenyi (GCS) -Bug-Debian: http://bugs.debian.org/652172 -Last-Update: 2012-11-18 ---- - ---- couchdb-1.2.0.orig/etc/logrotate.d/couchdb.tpl.in -+++ couchdb-1.2.0/etc/logrotate.d/couchdb.tpl.in -@@ -6,4 +6,6 @@ - compress - notifempty - missingok -+ su couchdb couchdb -+ create 0640 couchdb couchdb - } diff -Nru couchdb-1.2.0/debian/patches/series couchdb-1.4.0~rc.1/debian/patches/series --- couchdb-1.2.0/debian/patches/series 2013-01-22 14:18:34.000000000 -0500 +++ couchdb-1.4.0~rc.1/debian/patches/series 2013-08-28 13:53:47.000000000 -0400 @@ -1,8 +0,0 @@ -force-reload.patch -couchdb_own_rundir.patch 
-logrotate_as_couchdb.patch -couchdb_sighup.patch -wait_for_couchdb_stop.patch -improve_parsing_of_mochiweb_relative_paths.patch -improve_script_url_validation.patch -include_a_comment_before_jsonp_output.patch diff -Nru couchdb-1.2.0/debian/patches/wait_for_couchdb_stop.patch couchdb-1.4.0~rc.1/debian/patches/wait_for_couchdb_stop.patch --- couchdb-1.2.0/debian/patches/wait_for_couchdb_stop.patch 2013-01-09 05:52:20.000000000 -0500 +++ couchdb-1.4.0~rc.1/debian/patches/wait_for_couchdb_stop.patch 1969-12-31 19:00:00.000000000 -0500 @@ -1,30 +0,0 @@ -Description: Wait for complete stop of CouchDB - Check if CouchDB is already stopped and wait for a second if not before - checking again. - . -Author: Laszlo Boszormenyi (GCS) -Bug-Debian: http://bugs.debian.org/692295 -Last-Update: <2012-11-20> - ---- - ---- couchdb-1.2.0.orig/etc/init/couchdb.tpl.in -+++ couchdb-1.2.0/etc/init/couchdb.tpl.in -@@ -102,6 +102,17 @@ stop_couchdb () { - # Stop the running Apache CouchDB process. - - run_command "$COUCHDB -d" > /dev/null -+ RET=1; -+ for i in $(seq 1 30); do -+ status=`$COUCHDB -s 2>/dev/null | grep -c process`; -+ if [ "$status" -eq 0 ]; then -+ RET=0; -+ break; -+ fi; -+ echo -n .; -+ sleep 1s; -+ done; -+ return $RET - } - - display_status () { diff -Nru couchdb-1.2.0/debian/README.Debian couchdb-1.4.0~rc.1/debian/README.Debian --- couchdb-1.2.0/debian/README.Debian 2013-01-09 05:52:20.000000000 -0500 +++ couchdb-1.4.0~rc.1/debian/README.Debian 1969-12-31 19:00:00.000000000 -0500 @@ -1,26 +0,0 @@ -Debian README -============= - -Apache CouchDB is alpha software and still under heavy development. Please be -aware that important areas such as the public API or internal database format -may see backwards incompatible changes between versions. 
- -More detailed information can be found on the CouchDB wiki: - - http://wiki.apache.org/couchdb/BreakingChanges - -Because the internal database format may change, rendering your database -unusable with a new version of the package, the database directory is -partitioned by the CouchDB release number of the database format in use. - -The partitioned database directories are named like this: - - /var/lib/couchdb/VERSION - -Before you upgrade to a new version of this package with a changed database -format you should export any data that you want keep from the old database and -import the data into the new database after the upgrade. - -Migration tools are available in the `python-couchdb` package. - - -- Sam Bisbee Wed, 11 Nov 2009 23:22:21 -0500 diff -Nru couchdb-1.2.0/debian/README.source couchdb-1.4.0~rc.1/debian/README.source --- couchdb-1.2.0/debian/README.source 2013-01-09 05:52:20.000000000 -0500 +++ couchdb-1.4.0~rc.1/debian/README.source 1969-12-31 19:00:00.000000000 -0500 @@ -1,20 +0,0 @@ -Source README -============= - -The upstream source can be downloaded with the following command: - - ./debian/rules get-orig-source - -You will need the following packages installed: - - devscripts - -You can patch the upstream source with the following command: - - ./debian/rules patch - -You can add or edit patches using the following command: - - cdbs-edit-patch - - -- Noah Slater , Wed, 14 Jan 2009 16:46:53 +0000 diff -Nru couchdb-1.2.0/debian/rules couchdb-1.4.0~rc.1/debian/rules --- couchdb-1.2.0/debian/rules 2013-01-09 05:52:20.000000000 -0500 +++ couchdb-1.4.0~rc.1/debian/rules 2013-08-28 13:53:47.000000000 -0400 @@ -1,54 +1,18 @@ #!/usr/bin/make -f -# Copyright 2009, Noah Slater +%: + dh $@ -# Copying and distribution of this file, with or without modification, are -# permitted in any medium without royalty provided the copyright notice and this -# notice are preserved. 
+override_dh_auto_configure: + dh_auto_configure -- --enable-strictness --disable-init -include /usr/share/cdbs/1/rules/buildcore.mk -include /usr/share/cdbs/1/rules/debhelper.mk -include /usr/share/cdbs/1/class/autotools.mk +override_dh_auto_install: + dh_auto_install + chmod 0640 debian/tmp/etc/couchdb/local.ini + chmod 0750 debian/tmp/etc/couchdb/local.d + chmod 0750 debian/tmp/var/lib/couchdb + chmod 0750 debian/tmp/var/log/couchdb -#DEB_CONFIGURE_EXTRA_FLAGS = --enable-js-trunk -DEB_INSTALL_DOCS_ALL = -DEB_DH_INSTALLINIT_ARGS = -Ncouchdb-bin --onlyscripts - -LIB = ${localstatedir}/lib/\$${package_identifier} - -# @@ workaround for #486848 -binary-arch binary-indep: build - -post-patches:: - sed -i s,$(LIB)$$,$(LIB)/$(DEB_UPSTREAM_VERSION), configure - sed -i s,VERSION=%VERSION%$$,VERSION=$(DEB_UPSTREAM_VERSION), debian/couchdb.postrm - -cleanbuilddir:: - sed -i s,$(LIB)/$(DEB_UPSTREAM_VERSION)$$,$(LIB), configure - sed -i s,VERSION=$(DEB_UPSTREAM_VERSION)$$,VERSION=%VERSION%, debian/couchdb.postrm - -common-binary-post-install-arch:: - rm -r debian/tmp/var/run - rm -f debian/couchdb-bin/usr/share/doc/couchdb/LICENSE.gz - rm -f debian/couchdb-bin/usr/share/doc/couchdb/INSTALL.* - rm -f debian/couchdb-bin/usr/lib/couchdb/erlang/lib/couch-*/priv/lib/couch_erl_driver.la - rm -f debian/couchdb-bin/usr/share/couchdb/www/script/jquery.js - rm -f debian/couchdb-bin/usr/share/couchdb/www/script/jquery.form.js - chmod a-x debian/couchdb-bin/usr/share/couchdb/server/main.js \ - debian/couchdb-bin/usr/share/couchdb/server/main-coffee.js - sed -i "/dependency_libs/ s/'.*'/''/" `find debian/couchdb-bin -name '*.la'` - dh_link /usr/share/javascript/jquery/jquery.js /usr/share/couchdb/www/script/jquery.js - dh_link /usr/share/javascript/jquery-form/jquery.form.js /usr/share/couchdb/www/script/jquery.form.js - -common-binary-predeb-arch:: +override_dh_gencontrol: erlang-depends - -common-binary-predeb-indep:: - chmod 660 debian/couchdb/etc/couchdb/local.ini - chmod 750 
debian/couchdb/var/lib/couchdb - chmod 750 debian/couchdb/var/log/couchdb - -# @@ only works from source directory, see #494141 -.PHONY: get-orig-source -get-orig-source: - uscan --force-download --rename --download-version=$(DEB_UPSTREAM_VERSION) --destdir . + dh_gencontrol diff -Nru couchdb-1.2.0/DEVELOPERS couchdb-1.4.0~rc.1/DEVELOPERS --- couchdb-1.2.0/DEVELOPERS 2012-03-28 15:34:12.000000000 -0400 +++ couchdb-1.4.0~rc.1/DEVELOPERS 2013-08-23 10:57:21.000000000 -0400 @@ -1,6 +1,10 @@ Apache CouchDB DEVELOPERS ========================= +Before you start here, read `INSTALL.Unix` (or `INSTALL.Windows`) and +follow the setup instructions including the installation of all the +listed dependencies for your system. + Only follow these instructions if you are building from a source checkout. If you're unsure what this means, ignore this document. @@ -10,30 +14,109 @@ You will need the following installed: - * GNU Automake (>=1.6.3) (http://www.gnu.org/software/automake/) - * GNU Autoconf (>=2.59) (http://www.gnu.org/software/autoconf/) * GNU Libtool (http://www.gnu.org/software/libtool/) + * GNU Automake (>=1.6.3) (http://www.gnu.org/software/automake/) + * GNU Autoconf (>=2.63) (http://www.gnu.org/software/autoconf/) + * GNU Autoconf Archive (http://www.gnu.org/software/autoconf-archive/) + * pkg-config (http://www.freedesktop.org/wiki/Software/pkg-config) + +You may also need: + + * Sphinx (http://sphinx.pocoo.org/) + * LaTex (http://www.latex-project.org/) + * GNU Texinfo (http://www.gnu.org/software/texinfo/) * GNU help2man (http://www.gnu.org/software/help2man/) + * GnuPG (http://www.gnupg.org/) + * md5sum (http://www.microbrew.org/tools/md5sha1sum/) + * sha1sum (http://www.microbrew.org/tools/md5sha1sum/) -The `help2man` tool is optional, but will generate `man` pages for you. +The first of these optional dependencies are required for building the +documentation. The last three are needed to build releases. -Debian-based (inc. 
Ubuntu) Systems -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +You will need these optional dependencies installed if: + + * You are working on the documentation, or + * You are preparing a distribution archive -You can install the dependencies by running: +However, you do not need them if: - apt-get install automake autoconf libtool help2man + * You are building from a distribution archive, or + * You don't care about building the documentation -Be sure to update the version numbers to match your system's available packages. + +Here is a list of *optional* dependencies for various operating systems. +Installation will be easiest, when you install them all. + +Debian-based (inc. Ubuntu) Systems +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + sudo apt-get install help2man + sudo apt-get install python-sphinx + sudo apt-get install texlive-latex-base + sudo apt-get install texlive-latex-recommended + sudo apt-get install texlive-latex-extra + sudo apt-get install texlive-fonts-recommended + sudo apt-get install texinfo + sudo apt-get install gnupg + +Gentoo-based Systems +~~~~~~~~~~~~~~~~~~~~ + + sudo emerge texinfo + sudo emerge gnupg + sudo emerge coreutils + sudo emerge pkgconfig + sudo emerge help2man + sudo USE=latex emerge sphinx + +RedHat-based (Fedora, Centos, RHEL) Systems +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + sudo yum install help2man + sudo yum install python-sphinx + sudo yum install python-docutils + sudo yum install python-pygments + sudo yum install texlive-latex + sudo yum install texlive-latex-fonts + sudo yum install texinfo + sudo yum install gnupg Mac OS X ~~~~~~~~ -You can install the dependencies by running: +Install Homebrew, if you do not have it already: + + https://github.com/mxcl/homebrew - port install automake autoconf libtool help2man +Unless you want to install the optional dependencies, skip to the next section. -You will need MacPorts installed to use the `port` command. 
+Install what else we can with Homebrew: + + brew install help2man + brew install gnupg + brew install md5sha1sum + +If you don't already have pip installed, install it: + + sudo easy_install pip + +Now, install the required Python packages: + + sudo pip install sphinx + sudo pip install docutils + sudo pip install pygments + +Download MaxTeX from here: + + http://www.tug.org/mactex/ + +Follow the instructions to get a working LaTeX install on your system. + +Windows +~~~~~~~ + +Follow the instructions in INSTALL.Windows and build all components from +source, using the same Visual C++ compiler and runtime. Bootstrapping ------------- @@ -44,6 +127,33 @@ You must repeat this step every time you update your source checkout. +Configuring +----------- + +Configure the source by running: + + ./configure + +Note that this will not fail when the optional dependencies are missing. + +To ensure the optional dependencies are installed, run: + + ./configure --enable-strictness + +If you don't care about docs and want to skip the whole thing, run: + + ./configure --disable-docs + +If you're working on the build system itself, you can run: + + ./configure --disable-tests + +This skips the tests allowing quicker `make' cycles. + +If you want to build it into different destination than `/usr/local`. 
+ + ./configure --prefix=/ + Testing ------- @@ -60,12 +170,12 @@ Releasing --------- -Unix-like Systems -~~~~~~~~~~~~~~~~~ +The release procedure is documented here: -Configure the source by running: + https://wiki.apache.org/couchdb/Release_Procedure - ./configure +Unix-like Systems +~~~~~~~~~~~~~~~~~ Prepare the release artefacts by running: @@ -80,10 +190,6 @@ Microsoft Windows ~~~~~~~~~~~~~~~~~ -Configure the source by running: - - ./configure - Prepare the release artefacts by running: make dist diff -Nru couchdb-1.2.0/etc/couchdb/default.ini.tpl.in couchdb-1.4.0~rc.1/etc/couchdb/default.ini.tpl.in --- couchdb-1.2.0/etc/couchdb/default.ini.tpl.in 2012-03-28 15:34:12.000000000 -0400 +++ couchdb-1.4.0~rc.1/etc/couchdb/default.ini.tpl.in 2013-08-23 10:57:21.000000000 -0400 @@ -1,6 +1,9 @@ ; @configure_input@ ; Upgrading CouchDB will overwrite this file. +[vendor] +name = %package_author_name% +version = %version% [couchdb] database_dir = %localstatelibdir% @@ -19,6 +22,10 @@ ; deflate_[N] - use zlib's deflate, N is the compression level which ranges from 1 (fastest, ; lowest compression ratio) to 9 (slowest, highest compression ratio) file_compression = snappy +; Higher values may give better read performance due to less read operations +; and/or more OS page cache hits, but they can also increase overall response +; time for writes when there are many attachment write requests in parallel. +attachment_stream_buffer_size = 4096 [database_compaction] ; larger buffer sizes can originate smaller files @@ -42,6 +49,7 @@ ; For more socket options, consult Erlang's module 'inet' man page. 
;socket_options = [{recbuf, 262144}, {sndbuf, 262144}, {nodelay, true}] log_max_chunk_size = 1000000 +enable_cors = false [ssl] port = 6984 @@ -58,6 +66,33 @@ timeout = 600 ; number of seconds before automatic logout auth_cache_size = 50 ; size is number of cache entries allow_persistent_cookies = false ; set to true to allow persistent cookies +iterations = 10 ; iterations for password hashing +; comma-separated list of public fields, 404 if empty +; public_fields = + +[cors] +credentials = false +; List of origins separated by a comma, * means accept all +; Origins must include the scheme: http://example.com +; You can’t set origins: * and credentials = true at the same time. +;origins = * +; List of accepted headers separated by a comma +; headers = +; List of accepted methods +; methods = + + +; Configuration for a vhost +;[cors:http://example.com] +; credentials = false +; List of origins separated by a comma +; Origins must include the scheme: http://example.com +; You can’t set origins: * and credentials = true at the same time. 
+;origins = +; List of accepted headers separated by a comma +; headers = +; List of accepted methods +; methods = [couch_httpd_oauth] ; If set to 'true', oauth token and consumer secrets will be looked up @@ -96,7 +131,7 @@ os_process_limit = 25 [daemons] -view_manager={couch_view, start_link, []} +index_server={couch_index_server, start_link, []} external_manager={couch_external_manager, start_link, []} query_servers={couch_query_servers, start_link, []} vhosts={couch_httpd_vhost, start_link, []} @@ -105,7 +140,7 @@ stats_collector={couch_stats_collector, start, []} uuids={couch_uuids, start, []} auth_cache={couch_auth_cache, start_link, []} -replication_manager={couch_replication_manager, start_link, []} +replicator_manager={couch_replicator_manager, start_link, []} os_daemons={couch_os_daemons, start_link, []} compaction_daemon={couch_compaction_daemon, start_link, []} @@ -117,20 +152,22 @@ _all_dbs = {couch_httpd_misc_handlers, handle_all_dbs_req} _active_tasks = {couch_httpd_misc_handlers, handle_task_status_req} _config = {couch_httpd_misc_handlers, handle_config_req} -_replicate = {couch_httpd_replicator, handle_req} +_replicate = {couch_replicator_httpd, handle_req} _uuids = {couch_httpd_misc_handlers, handle_uuids_req} _restart = {couch_httpd_misc_handlers, handle_restart_req} _stats = {couch_httpd_stats_handlers, handle_stats_req} _log = {couch_httpd_misc_handlers, handle_log_req} _session = {couch_httpd_auth, handle_session_req} _oauth = {couch_httpd_oauth, handle_oauth_req} +_db_updates = {couch_dbupdates_httpd, handle_req} [httpd_db_handlers] -_view_cleanup = {couch_httpd_db, handle_view_cleanup_req} +_all_docs = {couch_mrview_http, handle_all_docs_req} +_changes = {couch_httpd_db, handle_changes_req} _compact = {couch_httpd_db, handle_compact_req} _design = {couch_httpd_db, handle_design_req} -_temp_view = {couch_httpd_view, handle_temp_view_req} -_changes = {couch_httpd_db, handle_changes_req} +_temp_view = {couch_mrview_http, handle_temp_view_req} 
+_view_cleanup = {couch_mrview_http, handle_cleanup_req} ; The external module takes an optional argument allowing you to narrow it to a ; single script. Otherwise the script name is inferred from the first path section @@ -139,12 +176,13 @@ ; _external = {couch_httpd_external, handle_external_req} [httpd_design_handlers] -_view = {couch_httpd_view, handle_view_req} -_show = {couch_httpd_show, handle_doc_show_req} -_list = {couch_httpd_show, handle_view_list_req} -_info = {couch_httpd_db, handle_design_info_req} +_compact = {couch_mrview_http, handle_compact_req} +_info = {couch_mrview_http, handle_info_req} +_list = {couch_mrview_show, handle_view_list_req} _rewrite = {couch_httpd_rewrite, handle_rewrite_req} -_update = {couch_httpd_show, handle_doc_update_req} +_show = {couch_mrview_show, handle_doc_show_req} +_update = {couch_mrview_show, handle_doc_update_req} +_view = {couch_mrview_http, handle_view_req} ; enable external as an httpd handler, then link it with commands here. ; note, this api is still under consideration. @@ -168,7 +206,12 @@ ; random prefix is regenerated and the process starts over. ; utc_random - Time since Jan 1, 1970 UTC with microseconds ; First 14 characters are the time in hex. Last 18 are random. +; utc_id - Time since Jan 1, 1970 UTC with microseconds, plus utc_id_suffix string +; First 14 characters are the time in hex. uuids/utc_id_suffix string value is appended to these. algorithm = sequential +; The utc_id_suffix value will be appended to uuids generated by the utc_id algorithm. +; Replicating instances should have unique utc_id_suffix values to ensure uniqueness of utc_id ids. +utc_id_suffix = [stats] ; rate is in milliseconds @@ -209,7 +252,7 @@ ;cert_file = /full/path/to/server_cert.pem ; Path to file containing user's private PEM encoded key. ;key_file = /full/path/to/server_key.pem -; String containing the user's password. Only used if the private keyfile is password protected. +; String containing the user's password. 
Only used if the private keyfile is password protected. ;password = somepassword ; Set to true to validate peer certificates. verify_ssl_certificates = false diff -Nru couchdb-1.2.0/etc/couchdb/local.ini couchdb-1.4.0~rc.1/etc/couchdb/local.ini --- couchdb-1.2.0/etc/couchdb/local.ini 2012-03-28 15:34:12.000000000 -0400 +++ couchdb-1.4.0~rc.1/etc/couchdb/local.ini 2013-08-23 10:57:21.000000000 -0400 @@ -37,10 +37,18 @@ [log] ;level = debug +[log_level_by_module] +; In this section you can specify any of the four log levels 'none', 'info', +; 'error' or 'debug' on a per-module basis. See src/*/*.erl for various +; modules. +;couch_httpd = error + + [os_daemons] ; For any commands listed here, CouchDB will attempt to ensure that -; the process remains alive while CouchDB runs as well as shut them -; down when CouchDB exits. +; the process remains alive. Daemons should monitor their environment +; to know when to exit. This can most easily be accomplished by exiting +; when stdin is closed. ;foo = /path/to/command -with args [daemons] @@ -58,16 +66,17 @@ ; certificates used for verifying a peer certificate). May be omitted if ; you do not want to verify the peer. ;cacert_file = /full/path/to/cacertf -; The verification fun (optionnal) if not specidied, the default +; The verification fun (optional) if not specified, the default ; verification fun will be used. ;verify_fun = {Module, VerifyFun} +; maximum peer certificate depth ssl_certificate_max_depth = 1 + ; To enable Virtual Hosts in CouchDB, add a vhost = path directive. All requests to ; the Virual Host will be redirected to the path. In the example below all requests ; to http://example.com/ are redirected to /database. 
; If you run CouchDB on a specific port, include the port number in the vhost: ; example.com:5984 = /database - [vhosts] ;example.com = /database/ diff -Nru couchdb-1.2.0/etc/couchdb/Makefile.am couchdb-1.4.0~rc.1/etc/couchdb/Makefile.am --- couchdb-1.2.0/etc/couchdb/Makefile.am 2012-03-28 15:34:12.000000000 -0400 +++ couchdb-1.4.0~rc.1/etc/couchdb/Makefile.am 2013-08-23 10:57:21.000000000 -0400 @@ -29,24 +29,28 @@ sed -e "s|%bindir%|.|g" \ -e "s|%localconfdir%|$(localconfdir)|g" \ -e "s|%localdatadir%|../share/couchdb|g" \ - -e "s|%localbuilddatadir%|../share/couchdb|g" \ + -e "s|%localbuilddatadir%|../share/couchdb|g" \ -e "s|%localstatelibdir%|../var/lib/couchdb|g" \ -e "s|%localstatelogdir%|../var/log/couchdb|g" \ -e "s|%localstaterundir%|../var/run/couchdb|g" \ -e "s|%couchprivlibdir%|../lib/couch-$(version)/priv/lib|g" \ -e "s|%couchjs_command_name%|couchjs.exe|g" \ + -e "s|%package_author_name%|$(package_author_name)|g" \ + -e "s|%version%|$(version)|g" \ < $< > $@ else default.ini: default.ini.tpl sed -e "s|%bindir%|$(bindir)|g" \ -e "s|%localconfdir%|$(localconfdir)|g" \ -e "s|%localdatadir%|$(localdatadir)|g" \ - -e "s|%localbuilddatadir%|$(localdatadir)|g" \ + -e "s|%localbuilddatadir%|$(localdatadir)|g" \ -e "s|%localstatelibdir%|$(localstatelibdir)|g" \ -e "s|%localstatelogdir%|$(localstatelogdir)|g" \ -e "s|%localstaterundir%|$(localstaterundir)|g" \ -e "s|%couchprivlibdir%|$(couchprivlibdir)|g" \ -e "s|%couchjs_command_name%|$(couchjs_command_name)|g" \ + -e "s|%package_author_name%|$(package_author_name)|g" \ + -e "s|%version%|$(version)|g" \ < $< > $@ endif @@ -54,12 +58,14 @@ sed -e "s|%bindir%|$(abs_top_builddir)/bin|g" \ -e "s|%localconfdir%|$(abs_top_builddir)/etc/couchdb|g" \ -e "s|%localdatadir%|$(abs_top_srcdir)/share|g" \ - -e "s|%localbuilddatadir%|$(abs_top_builddir)/share|g" \ + -e "s|%localbuilddatadir%|$(abs_top_builddir)/share|g" \ -e "s|%localstatelibdir%|$(abs_top_builddir)/tmp/lib|g" \ -e 
"s|%localstatelogdir%|$(abs_top_builddir)/tmp/log|g" \ -e "s|%localstaterundir%|$(abs_top_builddir)/tmp/run|g" \ -e "s|%couchprivlibdir%|$(devcouchprivlibdir)|g" \ -e "s|%couchjs_command_name%|$(couchjs_dev_command_name)|g" \ + -e "s|%package_author_name%|$(package_author_name)|g" \ + -e "s|%version%|$(version)|g" \ < $< > $@ # Noah said to not specify local.ini but it borks @@ -67,6 +73,7 @@ local_dev.ini: local.ini if test ! -f "$@"; then \ cp $< $@; \ + chmod +w $@; \ fi install-data-hook: diff -Nru couchdb-1.2.0/etc/couchdb/Makefile.in couchdb-1.4.0~rc.1/etc/couchdb/Makefile.in --- couchdb-1.2.0/etc/couchdb/Makefile.in 2012-03-29 17:05:38.000000000 -0400 +++ couchdb-1.4.0~rc.1/etc/couchdb/Makefile.in 2013-08-23 10:57:40.000000000 -0400 @@ -1,8 +1,9 @@ -# Makefile.in generated by automake 1.10 from Makefile.am. +# Makefile.in generated by automake 1.11.6 from Makefile.am. # @configure_input@ # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, -# 2003, 2004, 2005, 2006 Free Software Foundation, Inc. +# 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 Free Software +# Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. 
@@ -15,9 +16,27 @@ @SET_MAKE@ VPATH = @srcdir@ +am__make_dryrun = \ + { \ + am__dry=no; \ + case $$MAKEFLAGS in \ + *\\[\ \ ]*) \ + echo 'am--echo: ; @echo "AM" OK' | $(MAKE) -f - 2>/dev/null \ + | grep '^AM OK$$' >/dev/null || am__dry=yes;; \ + *) \ + for am__flg in $$MAKEFLAGS; do \ + case $$am__flg in \ + *=*|--*) ;; \ + *n*) am__dry=yes; break;; \ + esac; \ + done;; \ + esac; \ + test $$am__dry = yes; \ + } pkgdatadir = $(datadir)/@PACKAGE@ -pkglibdir = $(libdir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ +pkglibdir = $(libdir)/@PACKAGE@ +pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c @@ -35,28 +54,52 @@ DIST_COMMON = $(srcdir)/Makefile.am $(srcdir)/Makefile.in \ $(srcdir)/default.ini.tpl.in ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 -am__aclocal_m4_deps = $(top_srcdir)/m4/ac_check_curl.m4 \ - $(top_srcdir)/m4/ac_check_icu.m4 $(top_srcdir)/m4/libtool.m4 \ +am__aclocal_m4_deps = $(top_srcdir)/m4/libtool.m4 \ $(top_srcdir)/m4/ltoptions.m4 $(top_srcdir)/m4/ltsugar.m4 \ $(top_srcdir)/m4/ltversion.m4 $(top_srcdir)/m4/lt~obsolete.m4 \ - $(top_srcdir)/m4/pkg.m4 $(top_srcdir)/acinclude.m4 \ - $(top_srcdir)/configure.ac + $(top_srcdir)/acinclude.m4 $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h \ $(top_builddir)/src/snappy/google-snappy/config.h CONFIG_CLEAN_FILES = default.ini.tpl +CONFIG_CLEAN_VPATH_FILES = SOURCES = DIST_SOURCES = +am__can_run_installinfo = \ + case $$AM_UPDATE_INFO_DIR in \ + n|no|NO) false;; \ + *) (install-info --version) >/dev/null 2>&1;; \ + esac am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; -am__strip_dir = `echo $$p | sed -e 's|^.*/||'`; 
+am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; +am__install_max = 40 +am__nobase_strip_setup = \ + srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` +am__nobase_strip = \ + for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" +am__nobase_list = $(am__nobase_strip_setup); \ + for p in $$list; do echo "$$p $$p"; done | \ + sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ + $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ + if (++n[$$2] == $(am__install_max)) \ + { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ + END { for (dir in files) print dir, files[dir] }' +am__base_list = \ + sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ + sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' +am__uninstall_files_from_dir = { \ + test -z "$$files" \ + || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ + || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ + $(am__cd) "$$dir" && rm -f $$files; }; \ + } am__installdirs = "$(DESTDIR)$(localconfdir)" -localconfDATA_INSTALL = $(INSTALL_DATA) DATA = $(localconf_DATA) $(noinst_DATA) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) transform = @program_transform_name@ @@ -98,7 +141,11 @@ FGREP = @FGREP@ FLAGS = @FLAGS@ GREP = @GREP@ -HELP2MAN_EXECUTABLE = @HELP2MAN_EXECUTABLE@ +HAS_HELP2MAN = @HAS_HELP2MAN@ +HAS_INSTALLINFO = @HAS_INSTALLINFO@ +HAS_MAKEINFO = @HAS_MAKEINFO@ +HAS_PDFLATEX = @HAS_PDFLATEX@ +HAS_SPHINX_BUILD = @HAS_SPHINX_BUILD@ ICU_BIN = @ICU_BIN@ ICU_CFLAGS = @ICU_CFLAGS@ ICU_CONFIG = @ICU_CONFIG@ @@ -111,10 +158,7 @@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ -JS185_CFLAGS = @JS185_CFLAGS@ -JS185_LIBS = @JS185_LIBS@ JS_CFLAGS = @JS_CFLAGS@ -JS_LDFLAGS = @JS_LDFLAGS@ JS_LIBS = @JS_LIBS@ JS_LIB_BINARY = @JS_LIB_BINARY@ LD = @LD@ @@ -126,6 +170,7 @@ LN_S = @LN_S@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ +MANIFEST_TOOL = 
@MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ NM = @NM@ NMEDIT = @NMEDIT@ @@ -138,6 +183,7 @@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ +PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PKG_CONFIG = @PKG_CONFIG@ @@ -156,6 +202,7 @@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ +ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ @@ -224,6 +271,7 @@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ +top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ version = @version@ @@ -247,14 +295,14 @@ @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ - cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh \ - && exit 0; \ + ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ + && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ - echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign etc/couchdb/Makefile'; \ - cd $(top_srcdir) && \ - $(AUTOMAKE) --foreign etc/couchdb/Makefile + echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign etc/couchdb/Makefile'; \ + $(am__cd) $(top_srcdir) && \ + $(AUTOMAKE) --foreign etc/couchdb/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ @@ -272,6 +320,7 @@ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh +$(am__aclocal_m4_deps): default.ini.tpl: $(top_builddir)/config.status $(srcdir)/default.ini.tpl.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ @@ -282,21 +331,25 @@ -rm -rf .libs _libs install-localconfDATA: $(localconf_DATA) @$(NORMAL_INSTALL) - test -z "$(localconfdir)" || $(MKDIR_P) "$(DESTDIR)$(localconfdir)" - @list='$(localconf_DATA)'; for p in $$list; do \ + @list='$(localconf_DATA)'; test -n "$(localconfdir)" || list=; \ + if test -n "$$list"; then \ + echo " $(MKDIR_P) '$(DESTDIR)$(localconfdir)'"; \ + $(MKDIR_P) "$(DESTDIR)$(localconfdir)" || exit 1; \ + fi; \ + for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ - f=$(am__strip_dir) \ - echo " $(localconfDATA_INSTALL) '$$d$$p' '$(DESTDIR)$(localconfdir)/$$f'"; \ - $(localconfDATA_INSTALL) "$$d$$p" "$(DESTDIR)$(localconfdir)/$$f"; \ + echo "$$d$$p"; \ + done | $(am__base_list) | \ + while read files; do \ + echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(localconfdir)'"; \ + $(INSTALL_DATA) $$files "$(DESTDIR)$(localconfdir)" || exit $$?; \ done uninstall-localconfDATA: @$(NORMAL_UNINSTALL) - @list='$(localconf_DATA)'; for p in $$list; do \ - f=$(am__strip_dir) \ - echo " rm -f '$(DESTDIR)$(localconfdir)/$$f'"; \ - rm -f "$(DESTDIR)$(localconfdir)/$$f"; \ - done + @list='$(localconf_DATA)'; test -n "$(localconfdir)" || list=; \ + files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ + dir='$(DESTDIR)$(localconfdir)'; $(am__uninstall_files_from_dir) tags: TAGS TAGS: @@ -320,13 +373,17 @@ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ + if test -d "$(distdir)/$$file"; then \ + find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ + fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ - cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \ + cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ - cp -pR $$d/$$file $(distdir)$$dir || exit 1; \ + cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ - test -f $(distdir)/$$file \ - || cp -p $$d/$$file $(distdir)/$$file \ + test -f "$(distdir)/$$file" \ + || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @@ -347,10 +404,15 @@ installcheck: installcheck-am install-strip: - $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ - install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ - `test -z '$(STRIP)' || \ - echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install + if test -z '$(STRIP)'; then \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + install; \ + else \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ + fi mostlyclean-generic: clean-generic: @@ -358,6 +420,7 @@ distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) + -test . 
= "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @@ -376,6 +439,8 @@ html: html-am +html-am: + info: info-am info-am: @@ -383,21 +448,30 @@ install-data-am: install-localconfDATA @$(NORMAL_INSTALL) $(MAKE) $(AM_MAKEFLAGS) install-data-hook - install-dvi: install-dvi-am +install-dvi-am: + install-exec-am: install-html: install-html-am +install-html-am: + install-info: install-info-am +install-info-am: + install-man: install-pdf: install-pdf-am +install-pdf-am: + install-ps: install-ps-am +install-ps-am: + installcheck-am: maintainer-clean: maintainer-clean-am @@ -438,35 +512,41 @@ @WINDOWS_TRUE@ sed -e "s|%bindir%|.|g" \ @WINDOWS_TRUE@ -e "s|%localconfdir%|$(localconfdir)|g" \ @WINDOWS_TRUE@ -e "s|%localdatadir%|../share/couchdb|g" \ -@WINDOWS_TRUE@ -e "s|%localbuilddatadir%|../share/couchdb|g" \ +@WINDOWS_TRUE@ -e "s|%localbuilddatadir%|../share/couchdb|g" \ @WINDOWS_TRUE@ -e "s|%localstatelibdir%|../var/lib/couchdb|g" \ @WINDOWS_TRUE@ -e "s|%localstatelogdir%|../var/log/couchdb|g" \ @WINDOWS_TRUE@ -e "s|%localstaterundir%|../var/run/couchdb|g" \ @WINDOWS_TRUE@ -e "s|%couchprivlibdir%|../lib/couch-$(version)/priv/lib|g" \ @WINDOWS_TRUE@ -e "s|%couchjs_command_name%|couchjs.exe|g" \ +@WINDOWS_TRUE@ -e "s|%package_author_name%|$(package_author_name)|g" \ +@WINDOWS_TRUE@ -e "s|%version%|$(version)|g" \ @WINDOWS_TRUE@ < $< > $@ @WINDOWS_FALSE@default.ini: default.ini.tpl @WINDOWS_FALSE@ sed -e "s|%bindir%|$(bindir)|g" \ @WINDOWS_FALSE@ -e "s|%localconfdir%|$(localconfdir)|g" \ @WINDOWS_FALSE@ -e "s|%localdatadir%|$(localdatadir)|g" \ -@WINDOWS_FALSE@ -e "s|%localbuilddatadir%|$(localdatadir)|g" \ +@WINDOWS_FALSE@ -e "s|%localbuilddatadir%|$(localdatadir)|g" \ @WINDOWS_FALSE@ -e "s|%localstatelibdir%|$(localstatelibdir)|g" \ @WINDOWS_FALSE@ -e "s|%localstatelogdir%|$(localstatelogdir)|g" \ @WINDOWS_FALSE@ -e 
"s|%localstaterundir%|$(localstaterundir)|g" \ @WINDOWS_FALSE@ -e "s|%couchprivlibdir%|$(couchprivlibdir)|g" \ @WINDOWS_FALSE@ -e "s|%couchjs_command_name%|$(couchjs_command_name)|g" \ +@WINDOWS_FALSE@ -e "s|%package_author_name%|$(package_author_name)|g" \ +@WINDOWS_FALSE@ -e "s|%version%|$(version)|g" \ @WINDOWS_FALSE@ < $< > $@ default_dev.ini: default.ini.tpl sed -e "s|%bindir%|$(abs_top_builddir)/bin|g" \ -e "s|%localconfdir%|$(abs_top_builddir)/etc/couchdb|g" \ -e "s|%localdatadir%|$(abs_top_srcdir)/share|g" \ - -e "s|%localbuilddatadir%|$(abs_top_builddir)/share|g" \ + -e "s|%localbuilddatadir%|$(abs_top_builddir)/share|g" \ -e "s|%localstatelibdir%|$(abs_top_builddir)/tmp/lib|g" \ -e "s|%localstatelogdir%|$(abs_top_builddir)/tmp/log|g" \ -e "s|%localstaterundir%|$(abs_top_builddir)/tmp/run|g" \ -e "s|%couchprivlibdir%|$(devcouchprivlibdir)|g" \ -e "s|%couchjs_command_name%|$(couchjs_dev_command_name)|g" \ + -e "s|%package_author_name%|$(package_author_name)|g" \ + -e "s|%version%|$(version)|g" \ < $< > $@ # Noah said to not specify local.ini but it borks @@ -474,6 +554,7 @@ local_dev.ini: local.ini if test ! -f "$@"; then \ cp $< $@; \ + chmod +w $@; \ fi install-data-hook: @@ -491,6 +572,7 @@ uninstall-local: rm -f "$(DESTDIR)/$(localconfdir)/local.ini" + # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: diff -Nru couchdb-1.2.0/etc/default/Makefile.in couchdb-1.4.0~rc.1/etc/default/Makefile.in --- couchdb-1.2.0/etc/default/Makefile.in 2012-03-29 17:05:38.000000000 -0400 +++ couchdb-1.4.0~rc.1/etc/default/Makefile.in 2013-08-23 10:57:40.000000000 -0400 @@ -1,8 +1,9 @@ -# Makefile.in generated by automake 1.10 from Makefile.am. +# Makefile.in generated by automake 1.11.6 from Makefile.am. # @configure_input@ # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, -# 2003, 2004, 2005, 2006 Free Software Foundation, Inc. 
+# 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 Free Software +# Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. @@ -14,9 +15,27 @@ @SET_MAKE@ VPATH = @srcdir@ +am__make_dryrun = \ + { \ + am__dry=no; \ + case $$MAKEFLAGS in \ + *\\[\ \ ]*) \ + echo 'am--echo: ; @echo "AM" OK' | $(MAKE) -f - 2>/dev/null \ + | grep '^AM OK$$' >/dev/null || am__dry=yes;; \ + *) \ + for am__flg in $$MAKEFLAGS; do \ + case $$am__flg in \ + *=*|--*) ;; \ + *n*) am__dry=yes; break;; \ + esac; \ + done;; \ + esac; \ + test $$am__dry = yes; \ + } pkgdatadir = $(datadir)/@PACKAGE@ -pkglibdir = $(libdir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ +pkglibdir = $(libdir)/@PACKAGE@ +pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c @@ -34,20 +53,24 @@ subdir = etc/default DIST_COMMON = $(srcdir)/Makefile.am $(srcdir)/Makefile.in ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 -am__aclocal_m4_deps = $(top_srcdir)/m4/ac_check_curl.m4 \ - $(top_srcdir)/m4/ac_check_icu.m4 $(top_srcdir)/m4/libtool.m4 \ +am__aclocal_m4_deps = $(top_srcdir)/m4/libtool.m4 \ $(top_srcdir)/m4/ltoptions.m4 $(top_srcdir)/m4/ltsugar.m4 \ $(top_srcdir)/m4/ltversion.m4 $(top_srcdir)/m4/lt~obsolete.m4 \ - $(top_srcdir)/m4/pkg.m4 $(top_srcdir)/acinclude.m4 \ - $(top_srcdir)/configure.ac + $(top_srcdir)/acinclude.m4 $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h \ $(top_builddir)/src/snappy/google-snappy/config.h CONFIG_CLEAN_FILES = +CONFIG_CLEAN_VPATH_FILES = SOURCES = DIST_SOURCES = +am__can_run_installinfo = \ + case $$AM_UPDATE_INFO_DIR in \ + n|no|NO) false;; \ + *) (install-info --version) 
>/dev/null 2>&1;; \ + esac DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ @@ -87,7 +110,11 @@ FGREP = @FGREP@ FLAGS = @FLAGS@ GREP = @GREP@ -HELP2MAN_EXECUTABLE = @HELP2MAN_EXECUTABLE@ +HAS_HELP2MAN = @HAS_HELP2MAN@ +HAS_INSTALLINFO = @HAS_INSTALLINFO@ +HAS_MAKEINFO = @HAS_MAKEINFO@ +HAS_PDFLATEX = @HAS_PDFLATEX@ +HAS_SPHINX_BUILD = @HAS_SPHINX_BUILD@ ICU_BIN = @ICU_BIN@ ICU_CFLAGS = @ICU_CFLAGS@ ICU_CONFIG = @ICU_CONFIG@ @@ -100,10 +127,7 @@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ -JS185_CFLAGS = @JS185_CFLAGS@ -JS185_LIBS = @JS185_LIBS@ JS_CFLAGS = @JS_CFLAGS@ -JS_LDFLAGS = @JS_LDFLAGS@ JS_LIBS = @JS_LIBS@ JS_LIB_BINARY = @JS_LIB_BINARY@ LD = @LD@ @@ -115,6 +139,7 @@ LN_S = @LN_S@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ +MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ NM = @NM@ NMEDIT = @NMEDIT@ @@ -127,6 +152,7 @@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ +PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PKG_CONFIG = @PKG_CONFIG@ @@ -145,6 +171,7 @@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ +ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ @@ -213,6 +240,7 @@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ +top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ version = @version@ @@ -228,14 +256,14 @@ @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ - cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh \ - && exit 0; \ + ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ + && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ - echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign 
etc/default/Makefile'; \ - cd $(top_srcdir) && \ - $(AUTOMAKE) --foreign etc/default/Makefile + echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign etc/default/Makefile'; \ + $(am__cd) $(top_srcdir) && \ + $(AUTOMAKE) --foreign etc/default/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ @@ -253,6 +281,7 @@ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh +$(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo @@ -282,13 +311,17 @@ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ + if test -d "$(distdir)/$$file"; then \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ + fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ - cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \ + cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ + find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ - cp -pR $$d/$$file $(distdir)$$dir || exit 1; \ + cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ - test -f $(distdir)/$$file \ - || cp -p $$d/$$file $(distdir)/$$file \ + test -f "$(distdir)/$$file" \ + || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @@ -306,16 +339,22 @@ installcheck: installcheck-am install-strip: - $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ - install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ - `test -z '$(STRIP)' || \ - echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install + if test -z '$(STRIP)'; then \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + install; \ + else \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ + fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) + -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @@ -334,6 +373,8 @@ html: html-am +html-am: + info: info-am info-am: @@ -342,18 +383,28 @@ install-dvi: install-dvi-am +install-dvi-am: + install-exec-am: install-html: install-html-am +install-html-am: + install-info: install-info-am +install-info-am: + install-man: install-pdf: install-pdf-am +install-pdf-am: + install-ps: install-ps-am +install-ps-am: + installcheck-am: maintainer-clean: maintainer-clean-am @@ -387,6 +438,7 @@ maintainer-clean-generic mostlyclean mostlyclean-generic \ mostlyclean-libtool pdf pdf-am ps ps-am uninstall uninstall-am + # Tell versions [3.59,3.63) of GNU make to not export all variables. 
# Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: diff -Nru couchdb-1.2.0/etc/init/couchdb.tpl.in couchdb-1.4.0~rc.1/etc/init/couchdb.tpl.in --- couchdb-1.2.0/etc/init/couchdb.tpl.in 2012-03-28 15:34:12.000000000 -0400 +++ couchdb-1.4.0~rc.1/etc/init/couchdb.tpl.in 2013-08-23 10:57:21.000000000 -0400 @@ -84,6 +84,9 @@ # Start Apache CouchDB as a background process. mkdir -p "$RUN_DIR" + if test -n "$COUCHDB_USER"; then + chown $COUCHDB_USER "$RUN_DIR" + fi command="$COUCHDB -b" if test -n "$COUCHDB_STDOUT_FILE"; then command="$command -o $COUCHDB_STDOUT_FILE" diff -Nru couchdb-1.2.0/etc/init/Makefile.in couchdb-1.4.0~rc.1/etc/init/Makefile.in --- couchdb-1.2.0/etc/init/Makefile.in 2012-03-29 17:05:38.000000000 -0400 +++ couchdb-1.4.0~rc.1/etc/init/Makefile.in 2013-08-23 10:57:40.000000000 -0400 @@ -1,8 +1,9 @@ -# Makefile.in generated by automake 1.10 from Makefile.am. +# Makefile.in generated by automake 1.11.6 from Makefile.am. # @configure_input@ # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, -# 2003, 2004, 2005, 2006 Free Software Foundation, Inc. +# 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 Free Software +# Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. 
@@ -14,9 +15,27 @@ @SET_MAKE@ VPATH = @srcdir@ +am__make_dryrun = \ + { \ + am__dry=no; \ + case $$MAKEFLAGS in \ + *\\[\ \ ]*) \ + echo 'am--echo: ; @echo "AM" OK' | $(MAKE) -f - 2>/dev/null \ + | grep '^AM OK$$' >/dev/null || am__dry=yes;; \ + *) \ + for am__flg in $$MAKEFLAGS; do \ + case $$am__flg in \ + *=*|--*) ;; \ + *n*) am__dry=yes; break;; \ + esac; \ + done;; \ + esac; \ + test $$am__dry = yes; \ + } pkgdatadir = $(datadir)/@PACKAGE@ -pkglibdir = $(libdir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ +pkglibdir = $(libdir)/@PACKAGE@ +pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c @@ -35,20 +54,24 @@ DIST_COMMON = $(srcdir)/Makefile.am $(srcdir)/Makefile.in \ $(srcdir)/couchdb.tpl.in ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 -am__aclocal_m4_deps = $(top_srcdir)/m4/ac_check_curl.m4 \ - $(top_srcdir)/m4/ac_check_icu.m4 $(top_srcdir)/m4/libtool.m4 \ +am__aclocal_m4_deps = $(top_srcdir)/m4/libtool.m4 \ $(top_srcdir)/m4/ltoptions.m4 $(top_srcdir)/m4/ltsugar.m4 \ $(top_srcdir)/m4/ltversion.m4 $(top_srcdir)/m4/lt~obsolete.m4 \ - $(top_srcdir)/m4/pkg.m4 $(top_srcdir)/acinclude.m4 \ - $(top_srcdir)/configure.ac + $(top_srcdir)/acinclude.m4 $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h \ $(top_builddir)/src/snappy/google-snappy/config.h CONFIG_CLEAN_FILES = couchdb.tpl +CONFIG_CLEAN_VPATH_FILES = SOURCES = DIST_SOURCES = +am__can_run_installinfo = \ + case $$AM_UPDATE_INFO_DIR in \ + n|no|NO) false;; \ + *) (install-info --version) >/dev/null 2>&1;; \ + esac DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ @@ -88,7 +111,11 @@ FGREP = @FGREP@ FLAGS = @FLAGS@ GREP = @GREP@ -HELP2MAN_EXECUTABLE = @HELP2MAN_EXECUTABLE@ +HAS_HELP2MAN = 
@HAS_HELP2MAN@ +HAS_INSTALLINFO = @HAS_INSTALLINFO@ +HAS_MAKEINFO = @HAS_MAKEINFO@ +HAS_PDFLATEX = @HAS_PDFLATEX@ +HAS_SPHINX_BUILD = @HAS_SPHINX_BUILD@ ICU_BIN = @ICU_BIN@ ICU_CFLAGS = @ICU_CFLAGS@ ICU_CONFIG = @ICU_CONFIG@ @@ -101,10 +128,7 @@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ -JS185_CFLAGS = @JS185_CFLAGS@ -JS185_LIBS = @JS185_LIBS@ JS_CFLAGS = @JS_CFLAGS@ -JS_LDFLAGS = @JS_LDFLAGS@ JS_LIBS = @JS_LIBS@ JS_LIB_BINARY = @JS_LIB_BINARY@ LD = @LD@ @@ -116,6 +140,7 @@ LN_S = @LN_S@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ +MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ NM = @NM@ NMEDIT = @NMEDIT@ @@ -128,6 +153,7 @@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ +PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PKG_CONFIG = @PKG_CONFIG@ @@ -146,6 +172,7 @@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ +ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ @@ -214,6 +241,7 @@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ +top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ version = @version@ @@ -229,14 +257,14 @@ @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ - cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh \ - && exit 0; \ + ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ + && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ - echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign etc/init/Makefile'; \ - cd $(top_srcdir) && \ - $(AUTOMAKE) --foreign etc/init/Makefile + echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign etc/init/Makefile'; \ + $(am__cd) $(top_srcdir) && \ + $(AUTOMAKE) --foreign etc/init/Makefile .PRECIOUS: Makefile Makefile: 
$(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ @@ -254,6 +282,7 @@ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh +$(am__aclocal_m4_deps): couchdb.tpl: $(top_builddir)/config.status $(srcdir)/couchdb.tpl.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ @@ -285,13 +314,17 @@ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ + if test -d "$(distdir)/$$file"; then \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ + fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ - cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \ + cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ - cp -pR $$d/$$file $(distdir)$$dir || exit 1; \ + cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ - test -f $(distdir)/$$file \ - || cp -p $$d/$$file $(distdir)/$$file \ + test -f "$(distdir)/$$file" \ + || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @@ -309,16 +342,22 @@ installcheck: installcheck-am install-strip: - $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ - install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ - `test -z '$(STRIP)' || \ - echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install + if test -z '$(STRIP)'; then \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + install; \ + else \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ + fi mostlyclean-generic: clean-generic: distclean-generic: -test -z 
"$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) + -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @@ -337,6 +376,8 @@ html: html-am +html-am: + info: info-am info-am: @@ -345,18 +386,28 @@ install-dvi: install-dvi-am +install-dvi-am: + install-exec-am: install-html: install-html-am +install-html-am: + install-info: install-info-am +install-info-am: + install-man: install-pdf: install-pdf-am +install-pdf-am: + install-ps: install-ps-am +install-ps-am: + installcheck-am: maintainer-clean: maintainer-clean-am @@ -390,6 +441,7 @@ maintainer-clean-generic mostlyclean mostlyclean-generic \ mostlyclean-libtool pdf pdf-am ps ps-am uninstall uninstall-am + # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: diff -Nru couchdb-1.2.0/etc/launchd/Makefile.in couchdb-1.4.0~rc.1/etc/launchd/Makefile.in --- couchdb-1.2.0/etc/launchd/Makefile.in 2012-03-29 17:05:38.000000000 -0400 +++ couchdb-1.4.0~rc.1/etc/launchd/Makefile.in 2013-08-23 10:57:40.000000000 -0400 @@ -1,8 +1,9 @@ -# Makefile.in generated by automake 1.10 from Makefile.am. +# Makefile.in generated by automake 1.11.6 from Makefile.am. # @configure_input@ # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, -# 2003, 2004, 2005, 2006 Free Software Foundation, Inc. +# 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 Free Software +# Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. 
@@ -14,9 +15,27 @@ @SET_MAKE@ VPATH = @srcdir@ +am__make_dryrun = \ + { \ + am__dry=no; \ + case $$MAKEFLAGS in \ + *\\[\ \ ]*) \ + echo 'am--echo: ; @echo "AM" OK' | $(MAKE) -f - 2>/dev/null \ + | grep '^AM OK$$' >/dev/null || am__dry=yes;; \ + *) \ + for am__flg in $$MAKEFLAGS; do \ + case $$am__flg in \ + *=*|--*) ;; \ + *n*) am__dry=yes; break;; \ + esac; \ + done;; \ + esac; \ + test $$am__dry = yes; \ + } pkgdatadir = $(datadir)/@PACKAGE@ -pkglibdir = $(libdir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ +pkglibdir = $(libdir)/@PACKAGE@ +pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c @@ -35,20 +54,24 @@ DIST_COMMON = $(srcdir)/Makefile.am $(srcdir)/Makefile.in \ $(srcdir)/org.apache.couchdb.plist.tpl.in ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 -am__aclocal_m4_deps = $(top_srcdir)/m4/ac_check_curl.m4 \ - $(top_srcdir)/m4/ac_check_icu.m4 $(top_srcdir)/m4/libtool.m4 \ +am__aclocal_m4_deps = $(top_srcdir)/m4/libtool.m4 \ $(top_srcdir)/m4/ltoptions.m4 $(top_srcdir)/m4/ltsugar.m4 \ $(top_srcdir)/m4/ltversion.m4 $(top_srcdir)/m4/lt~obsolete.m4 \ - $(top_srcdir)/m4/pkg.m4 $(top_srcdir)/acinclude.m4 \ - $(top_srcdir)/configure.ac + $(top_srcdir)/acinclude.m4 $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h \ $(top_builddir)/src/snappy/google-snappy/config.h CONFIG_CLEAN_FILES = org.apache.couchdb.plist.tpl +CONFIG_CLEAN_VPATH_FILES = SOURCES = DIST_SOURCES = +am__can_run_installinfo = \ + case $$AM_UPDATE_INFO_DIR in \ + n|no|NO) false;; \ + *) (install-info --version) >/dev/null 2>&1;; \ + esac DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ @@ -88,7 +111,11 @@ FGREP = @FGREP@ FLAGS = @FLAGS@ GREP = @GREP@ -HELP2MAN_EXECUTABLE = 
@HELP2MAN_EXECUTABLE@ +HAS_HELP2MAN = @HAS_HELP2MAN@ +HAS_INSTALLINFO = @HAS_INSTALLINFO@ +HAS_MAKEINFO = @HAS_MAKEINFO@ +HAS_PDFLATEX = @HAS_PDFLATEX@ +HAS_SPHINX_BUILD = @HAS_SPHINX_BUILD@ ICU_BIN = @ICU_BIN@ ICU_CFLAGS = @ICU_CFLAGS@ ICU_CONFIG = @ICU_CONFIG@ @@ -101,10 +128,7 @@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ -JS185_CFLAGS = @JS185_CFLAGS@ -JS185_LIBS = @JS185_LIBS@ JS_CFLAGS = @JS_CFLAGS@ -JS_LDFLAGS = @JS_LDFLAGS@ JS_LIBS = @JS_LIBS@ JS_LIB_BINARY = @JS_LIB_BINARY@ LD = @LD@ @@ -116,6 +140,7 @@ LN_S = @LN_S@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ +MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ NM = @NM@ NMEDIT = @NMEDIT@ @@ -128,6 +153,7 @@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ +PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PKG_CONFIG = @PKG_CONFIG@ @@ -146,6 +172,7 @@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ +ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ @@ -214,6 +241,7 @@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ +top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ version = @version@ @@ -229,14 +257,14 @@ @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ - cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh \ - && exit 0; \ + ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ + && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ - echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign etc/launchd/Makefile'; \ - cd $(top_srcdir) && \ - $(AUTOMAKE) --foreign etc/launchd/Makefile + echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign etc/launchd/Makefile'; \ + $(am__cd) $(top_srcdir) && \ + $(AUTOMAKE) --foreign 
etc/launchd/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ @@ -254,6 +282,7 @@ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh +$(am__aclocal_m4_deps): org.apache.couchdb.plist.tpl: $(top_builddir)/config.status $(srcdir)/org.apache.couchdb.plist.tpl.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ @@ -285,13 +314,17 @@ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ + if test -d "$(distdir)/$$file"; then \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ + fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ - cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \ + cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ - cp -pR $$d/$$file $(distdir)$$dir || exit 1; \ + cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ - test -f $(distdir)/$$file \ - || cp -p $$d/$$file $(distdir)/$$file \ + test -f "$(distdir)/$$file" \ + || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @@ -309,16 +342,22 @@ installcheck: installcheck-am install-strip: - $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ - install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ - `test -z '$(STRIP)' || \ - echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install + if test -z '$(STRIP)'; then \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + install; \ + else \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; 
\ + fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) + -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @@ -337,6 +376,8 @@ html: html-am +html-am: + info: info-am info-am: @@ -345,18 +386,28 @@ install-dvi: install-dvi-am +install-dvi-am: + install-exec-am: install-html: install-html-am +install-html-am: + install-info: install-info-am +install-info-am: + install-man: install-pdf: install-pdf-am +install-pdf-am: + install-ps: install-ps-am +install-ps-am: + installcheck-am: maintainer-clean: maintainer-clean-am @@ -390,6 +441,7 @@ maintainer-clean-generic mostlyclean mostlyclean-generic \ mostlyclean-libtool pdf pdf-am ps ps-am uninstall uninstall-am + # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: diff -Nru couchdb-1.2.0/etc/logrotate.d/Makefile.in couchdb-1.4.0~rc.1/etc/logrotate.d/Makefile.in --- couchdb-1.2.0/etc/logrotate.d/Makefile.in 2012-03-29 17:05:38.000000000 -0400 +++ couchdb-1.4.0~rc.1/etc/logrotate.d/Makefile.in 2013-08-23 10:57:40.000000000 -0400 @@ -1,8 +1,9 @@ -# Makefile.in generated by automake 1.10 from Makefile.am. +# Makefile.in generated by automake 1.11.6 from Makefile.am. # @configure_input@ # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, -# 2003, 2004, 2005, 2006 Free Software Foundation, Inc. +# 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 Free Software +# Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. 
@@ -14,9 +15,27 @@ @SET_MAKE@ VPATH = @srcdir@ +am__make_dryrun = \ + { \ + am__dry=no; \ + case $$MAKEFLAGS in \ + *\\[\ \ ]*) \ + echo 'am--echo: ; @echo "AM" OK' | $(MAKE) -f - 2>/dev/null \ + | grep '^AM OK$$' >/dev/null || am__dry=yes;; \ + *) \ + for am__flg in $$MAKEFLAGS; do \ + case $$am__flg in \ + *=*|--*) ;; \ + *n*) am__dry=yes; break;; \ + esac; \ + done;; \ + esac; \ + test $$am__dry = yes; \ + } pkgdatadir = $(datadir)/@PACKAGE@ -pkglibdir = $(libdir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ +pkglibdir = $(libdir)/@PACKAGE@ +pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c @@ -35,20 +54,24 @@ DIST_COMMON = $(srcdir)/Makefile.am $(srcdir)/Makefile.in \ $(srcdir)/couchdb.tpl.in ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 -am__aclocal_m4_deps = $(top_srcdir)/m4/ac_check_curl.m4 \ - $(top_srcdir)/m4/ac_check_icu.m4 $(top_srcdir)/m4/libtool.m4 \ +am__aclocal_m4_deps = $(top_srcdir)/m4/libtool.m4 \ $(top_srcdir)/m4/ltoptions.m4 $(top_srcdir)/m4/ltsugar.m4 \ $(top_srcdir)/m4/ltversion.m4 $(top_srcdir)/m4/lt~obsolete.m4 \ - $(top_srcdir)/m4/pkg.m4 $(top_srcdir)/acinclude.m4 \ - $(top_srcdir)/configure.ac + $(top_srcdir)/acinclude.m4 $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h \ $(top_builddir)/src/snappy/google-snappy/config.h CONFIG_CLEAN_FILES = couchdb.tpl +CONFIG_CLEAN_VPATH_FILES = SOURCES = DIST_SOURCES = +am__can_run_installinfo = \ + case $$AM_UPDATE_INFO_DIR in \ + n|no|NO) false;; \ + *) (install-info --version) >/dev/null 2>&1;; \ + esac DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ @@ -88,7 +111,11 @@ FGREP = @FGREP@ FLAGS = @FLAGS@ GREP = @GREP@ -HELP2MAN_EXECUTABLE = @HELP2MAN_EXECUTABLE@ +HAS_HELP2MAN = 
@HAS_HELP2MAN@ +HAS_INSTALLINFO = @HAS_INSTALLINFO@ +HAS_MAKEINFO = @HAS_MAKEINFO@ +HAS_PDFLATEX = @HAS_PDFLATEX@ +HAS_SPHINX_BUILD = @HAS_SPHINX_BUILD@ ICU_BIN = @ICU_BIN@ ICU_CFLAGS = @ICU_CFLAGS@ ICU_CONFIG = @ICU_CONFIG@ @@ -101,10 +128,7 @@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ -JS185_CFLAGS = @JS185_CFLAGS@ -JS185_LIBS = @JS185_LIBS@ JS_CFLAGS = @JS_CFLAGS@ -JS_LDFLAGS = @JS_LDFLAGS@ JS_LIBS = @JS_LIBS@ JS_LIB_BINARY = @JS_LIB_BINARY@ LD = @LD@ @@ -116,6 +140,7 @@ LN_S = @LN_S@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ +MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ NM = @NM@ NMEDIT = @NMEDIT@ @@ -128,6 +153,7 @@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ +PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PKG_CONFIG = @PKG_CONFIG@ @@ -146,6 +172,7 @@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ +ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ @@ -214,6 +241,7 @@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ +top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ version = @version@ @@ -229,14 +257,14 @@ @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ - cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh \ - && exit 0; \ + ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ + && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ - echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign etc/logrotate.d/Makefile'; \ - cd $(top_srcdir) && \ - $(AUTOMAKE) --foreign etc/logrotate.d/Makefile + echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign etc/logrotate.d/Makefile'; \ + $(am__cd) $(top_srcdir) && \ + $(AUTOMAKE) --foreign etc/logrotate.d/Makefile 
.PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ @@ -254,6 +282,7 @@ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh +$(am__aclocal_m4_deps): couchdb.tpl: $(top_builddir)/config.status $(srcdir)/couchdb.tpl.in cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ @@ -285,13 +314,17 @@ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ + if test -d "$(distdir)/$$file"; then \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ + fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ - cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \ + cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ - cp -pR $$d/$$file $(distdir)$$dir || exit 1; \ + cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ - test -f $(distdir)/$$file \ - || cp -p $$d/$$file $(distdir)/$$file \ + test -f "$(distdir)/$$file" \ + || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @@ -309,16 +342,22 @@ installcheck: installcheck-am install-strip: - $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ - install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ - `test -z '$(STRIP)' || \ - echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install + if test -z '$(STRIP)'; then \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + install; \ + else \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ + fi mostlyclean-generic: clean-generic: 
distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) + -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @@ -337,6 +376,8 @@ html: html-am +html-am: + info: info-am info-am: @@ -345,18 +386,28 @@ install-dvi: install-dvi-am +install-dvi-am: + install-exec-am: install-html: install-html-am +install-html-am: + install-info: install-info-am +install-info-am: + install-man: install-pdf: install-pdf-am +install-pdf-am: + install-ps: install-ps-am +install-ps-am: + installcheck-am: maintainer-clean: maintainer-clean-am @@ -390,6 +441,7 @@ maintainer-clean-generic mostlyclean mostlyclean-generic \ mostlyclean-libtool pdf pdf-am ps ps-am uninstall uninstall-am + # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: diff -Nru couchdb-1.2.0/etc/Makefile.in couchdb-1.4.0~rc.1/etc/Makefile.in --- couchdb-1.2.0/etc/Makefile.in 2012-03-29 17:05:38.000000000 -0400 +++ couchdb-1.4.0~rc.1/etc/Makefile.in 2013-08-23 10:57:40.000000000 -0400 @@ -1,8 +1,9 @@ -# Makefile.in generated by automake 1.10 from Makefile.am. +# Makefile.in generated by automake 1.11.6 from Makefile.am. # @configure_input@ # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, -# 2003, 2004, 2005, 2006 Free Software Foundation, Inc. +# 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 Free Software +# Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. 
@@ -15,9 +16,27 @@ @SET_MAKE@ VPATH = @srcdir@ +am__make_dryrun = \ + { \ + am__dry=no; \ + case $$MAKEFLAGS in \ + *\\[\ \ ]*) \ + echo 'am--echo: ; @echo "AM" OK' | $(MAKE) -f - 2>/dev/null \ + | grep '^AM OK$$' >/dev/null || am__dry=yes;; \ + *) \ + for am__flg in $$MAKEFLAGS; do \ + case $$am__flg in \ + *=*|--*) ;; \ + *n*) am__dry=yes; break;; \ + esac; \ + done;; \ + esac; \ + test $$am__dry = yes; \ + } pkgdatadir = $(datadir)/@PACKAGE@ -pkglibdir = $(libdir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ +pkglibdir = $(libdir)/@PACKAGE@ +pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c @@ -38,18 +57,17 @@ subdir = etc DIST_COMMON = $(srcdir)/Makefile.am $(srcdir)/Makefile.in ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 -am__aclocal_m4_deps = $(top_srcdir)/m4/ac_check_curl.m4 \ - $(top_srcdir)/m4/ac_check_icu.m4 $(top_srcdir)/m4/libtool.m4 \ +am__aclocal_m4_deps = $(top_srcdir)/m4/libtool.m4 \ $(top_srcdir)/m4/ltoptions.m4 $(top_srcdir)/m4/ltsugar.m4 \ $(top_srcdir)/m4/ltversion.m4 $(top_srcdir)/m4/lt~obsolete.m4 \ - $(top_srcdir)/m4/pkg.m4 $(top_srcdir)/acinclude.m4 \ - $(top_srcdir)/configure.ac + $(top_srcdir)/acinclude.m4 $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h \ $(top_builddir)/src/snappy/google-snappy/config.h CONFIG_CLEAN_FILES = +CONFIG_CLEAN_VPATH_FILES = SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive dvi-recursive \ @@ -59,24 +77,75 @@ install-pdf-recursive install-ps-recursive install-recursive \ installcheck-recursive installdirs-recursive pdf-recursive \ ps-recursive uninstall-recursive +am__can_run_installinfo = \ + case $$AM_UPDATE_INFO_DIR in \ + n|no|NO) false;; \ + *) (install-info --version) >/dev/null 2>&1;; \ + esac am__vpath_adj_setup = 
srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; -am__strip_dir = `echo $$p | sed -e 's|^.*/||'`; +am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; +am__install_max = 40 +am__nobase_strip_setup = \ + srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` +am__nobase_strip = \ + for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" +am__nobase_list = $(am__nobase_strip_setup); \ + for p in $$list; do echo "$$p $$p"; done | \ + sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ + $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ + if (++n[$$2] == $(am__install_max)) \ + { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ + END { for (dir in files) print dir, files[dir] }' +am__base_list = \ + sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ + sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' +am__uninstall_files_from_dir = { \ + test -z "$$files" \ + || { test ! -d "$$dir" && test ! -f "$$dir" && test ! 
-r "$$dir"; } \ + || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ + $(am__cd) "$$dir" && rm -f $$files; }; \ + } am__installdirs = "$(DESTDIR)$(initdir)" "$(DESTDIR)$(launchddir)" \ "$(DESTDIR)$(sysconfdir)" -initDATA_INSTALL = $(INSTALL_DATA) -launchdDATA_INSTALL = $(INSTALL_DATA) -nobase_sysconfDATA_INSTALL = $(install_sh_DATA) DATA = $(init_DATA) $(launchd_DATA) $(nobase_sysconf_DATA) RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive +AM_RECURSIVE_TARGETS = $(RECURSIVE_TARGETS:-recursive=) \ + $(RECURSIVE_CLEAN_TARGETS:-recursive=) tags TAGS ctags CTAGS \ + distdir ETAGS = etags CTAGS = ctags DIST_SUBDIRS = $(SUBDIRS) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) +am__relativize = \ + dir0=`pwd`; \ + sed_first='s,^\([^/]*\)/.*$$,\1,'; \ + sed_rest='s,^[^/]*/*,,'; \ + sed_last='s,^.*/\([^/]*\)$$,\1,'; \ + sed_butlast='s,/*[^/]*$$,,'; \ + while test -n "$$dir1"; do \ + first=`echo "$$dir1" | sed -e "$$sed_first"`; \ + if test "$$first" != "."; then \ + if test "$$first" = ".."; then \ + dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ + dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ + else \ + first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ + if test "$$first2" = "$$first"; then \ + dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ + else \ + dir2="../$$dir2"; \ + fi; \ + dir0="$$dir0"/"$$first"; \ + fi; \ + fi; \ + dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ + done; \ + reldir="$$dir2" transform = @program_transform_name@ ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ @@ -116,7 +185,11 @@ FGREP = @FGREP@ FLAGS = @FLAGS@ GREP = @GREP@ -HELP2MAN_EXECUTABLE = @HELP2MAN_EXECUTABLE@ +HAS_HELP2MAN = @HAS_HELP2MAN@ +HAS_INSTALLINFO = @HAS_INSTALLINFO@ +HAS_MAKEINFO = @HAS_MAKEINFO@ +HAS_PDFLATEX = @HAS_PDFLATEX@ +HAS_SPHINX_BUILD = @HAS_SPHINX_BUILD@ ICU_BIN = @ICU_BIN@ ICU_CFLAGS = @ICU_CFLAGS@ ICU_CONFIG = @ICU_CONFIG@ @@ -129,10 +202,7 @@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ 
INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ -JS185_CFLAGS = @JS185_CFLAGS@ -JS185_LIBS = @JS185_LIBS@ JS_CFLAGS = @JS_CFLAGS@ -JS_LDFLAGS = @JS_LDFLAGS@ JS_LIBS = @JS_LIBS@ JS_LIB_BINARY = @JS_LIB_BINARY@ LD = @LD@ @@ -144,6 +214,7 @@ LN_S = @LN_S@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ +MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ NM = @NM@ NMEDIT = @NMEDIT@ @@ -156,6 +227,7 @@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ +PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PKG_CONFIG = @PKG_CONFIG@ @@ -174,6 +246,7 @@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ +ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ @@ -242,6 +315,7 @@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ +top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ version = @version@ @@ -267,14 +341,14 @@ @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ - cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh \ - && exit 0; \ + ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ + && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ - echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign etc/Makefile'; \ - cd $(top_srcdir) && \ - $(AUTOMAKE) --foreign etc/Makefile + echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign etc/Makefile'; \ + $(am__cd) $(top_srcdir) && \ + $(AUTOMAKE) --foreign etc/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ @@ -292,6 +366,7 @@ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh +$(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo @@ -300,57 +375,70 @@ -rm -rf .libs _libs install-initDATA: $(init_DATA) @$(NORMAL_INSTALL) - test -z "$(initdir)" || $(MKDIR_P) "$(DESTDIR)$(initdir)" - @list='$(init_DATA)'; for p in $$list; do \ + @list='$(init_DATA)'; test -n "$(initdir)" || list=; \ + if test -n "$$list"; then \ + echo " $(MKDIR_P) '$(DESTDIR)$(initdir)'"; \ + $(MKDIR_P) "$(DESTDIR)$(initdir)" || exit 1; \ + fi; \ + for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ - f=$(am__strip_dir) \ - echo " $(initDATA_INSTALL) '$$d$$p' '$(DESTDIR)$(initdir)/$$f'"; \ - $(initDATA_INSTALL) "$$d$$p" "$(DESTDIR)$(initdir)/$$f"; \ + echo "$$d$$p"; \ + done | $(am__base_list) | \ + while read files; do \ + echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(initdir)'"; \ + $(INSTALL_DATA) $$files "$(DESTDIR)$(initdir)" || exit $$?; \ done uninstall-initDATA: @$(NORMAL_UNINSTALL) - @list='$(init_DATA)'; for p in $$list; do \ - f=$(am__strip_dir) \ - echo " rm -f '$(DESTDIR)$(initdir)/$$f'"; \ - rm -f "$(DESTDIR)$(initdir)/$$f"; \ - done + @list='$(init_DATA)'; test -n "$(initdir)" || list=; \ + files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ + dir='$(DESTDIR)$(initdir)'; $(am__uninstall_files_from_dir) install-launchdDATA: $(launchd_DATA) @$(NORMAL_INSTALL) - test -z "$(launchddir)" || $(MKDIR_P) "$(DESTDIR)$(launchddir)" - @list='$(launchd_DATA)'; for p in $$list; do \ + @list='$(launchd_DATA)'; test -n "$(launchddir)" || list=; \ + if test -n "$$list"; then \ + echo " $(MKDIR_P) '$(DESTDIR)$(launchddir)'"; \ + $(MKDIR_P) "$(DESTDIR)$(launchddir)" || exit 1; \ + fi; \ + for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ - f=$(am__strip_dir) \ - echo " $(launchdDATA_INSTALL) '$$d$$p' '$(DESTDIR)$(launchddir)/$$f'"; \ - 
$(launchdDATA_INSTALL) "$$d$$p" "$(DESTDIR)$(launchddir)/$$f"; \ + echo "$$d$$p"; \ + done | $(am__base_list) | \ + while read files; do \ + echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(launchddir)'"; \ + $(INSTALL_DATA) $$files "$(DESTDIR)$(launchddir)" || exit $$?; \ done uninstall-launchdDATA: @$(NORMAL_UNINSTALL) - @list='$(launchd_DATA)'; for p in $$list; do \ - f=$(am__strip_dir) \ - echo " rm -f '$(DESTDIR)$(launchddir)/$$f'"; \ - rm -f "$(DESTDIR)$(launchddir)/$$f"; \ - done + @list='$(launchd_DATA)'; test -n "$(launchddir)" || list=; \ + files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ + dir='$(DESTDIR)$(launchddir)'; $(am__uninstall_files_from_dir) install-nobase_sysconfDATA: $(nobase_sysconf_DATA) @$(NORMAL_INSTALL) - test -z "$(sysconfdir)" || $(MKDIR_P) "$(DESTDIR)$(sysconfdir)" - @$(am__vpath_adj_setup) \ - list='$(nobase_sysconf_DATA)'; for p in $$list; do \ - if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ - $(am__vpath_adj) \ - echo " $(nobase_sysconfDATA_INSTALL) '$$d$$p' '$(DESTDIR)$(sysconfdir)/$$f'"; \ - $(nobase_sysconfDATA_INSTALL) "$$d$$p" "$(DESTDIR)$(sysconfdir)/$$f"; \ + @list='$(nobase_sysconf_DATA)'; test -n "$(sysconfdir)" || list=; \ + if test -n "$$list"; then \ + echo " $(MKDIR_P) '$(DESTDIR)$(sysconfdir)'"; \ + $(MKDIR_P) "$(DESTDIR)$(sysconfdir)" || exit 1; \ + fi; \ + $(am__nobase_list) | while read dir files; do \ + xfiles=; for file in $$files; do \ + if test -f "$$file"; then xfiles="$$xfiles $$file"; \ + else xfiles="$$xfiles $(srcdir)/$$file"; fi; done; \ + test -z "$$xfiles" || { \ + test "x$$dir" = x. 
|| { \ + echo " $(MKDIR_P) '$(DESTDIR)$(sysconfdir)/$$dir'"; \ + $(MKDIR_P) "$(DESTDIR)$(sysconfdir)/$$dir"; }; \ + echo " $(INSTALL_DATA) $$xfiles '$(DESTDIR)$(sysconfdir)/$$dir'"; \ + $(INSTALL_DATA) $$xfiles "$(DESTDIR)$(sysconfdir)/$$dir" || exit $$?; }; \ done uninstall-nobase_sysconfDATA: @$(NORMAL_UNINSTALL) - @$(am__vpath_adj_setup) \ - list='$(nobase_sysconf_DATA)'; for p in $$list; do \ - $(am__vpath_adj) \ - echo " rm -f '$(DESTDIR)$(sysconfdir)/$$f'"; \ - rm -f "$(DESTDIR)$(sysconfdir)/$$f"; \ - done + @list='$(nobase_sysconf_DATA)'; test -n "$(sysconfdir)" || list=; \ + $(am__nobase_strip_setup); files=`$(am__nobase_strip)`; \ + dir='$(DESTDIR)$(sysconfdir)'; $(am__uninstall_files_from_dir) # This directory's subdirectories are mostly independent; you can cd # into them and run `make' without going through this Makefile. @@ -359,7 +447,7 @@ # (which will cause the Makefiles to be regenerated when you run `make'); # (2) otherwise, pass the desired values on the `make' command line. $(RECURSIVE_TARGETS): - @failcom='exit 1'; \ + @fail= failcom='exit 1'; \ for f in x $$MAKEFLAGS; do \ case $$f in \ *=* | --[!k]*);; \ @@ -376,7 +464,7 @@ else \ local_target="$$target"; \ fi; \ - (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ + ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ @@ -384,7 +472,7 @@ fi; test -z "$$fail" $(RECURSIVE_CLEAN_TARGETS): - @failcom='exit 1'; \ + @fail= failcom='exit 1'; \ for f in x $$MAKEFLAGS; do \ case $$f in \ *=* | --[!k]*);; \ @@ -410,16 +498,16 @@ else \ local_target="$$target"; \ fi; \ - (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ + ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done && test -z "$$fail" tags-recursive: list='$(SUBDIRS)'; for subdir in $$list; do \ - test "$$subdir" = . || (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) tags); \ + test "$$subdir" = . 
|| ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) tags); \ done ctags-recursive: list='$(SUBDIRS)'; for subdir in $$list; do \ - test "$$subdir" = . || (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) ctags); \ + test "$$subdir" = . || ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) ctags); \ done ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) @@ -427,14 +515,14 @@ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ - $(AWK) ' { files[$$0] = 1; } \ - END { for (i in files) print i; }'`; \ + $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ + END { if (nonempty) { for (i in files) print i; }; }'`; \ mkid -fID $$unique tags: TAGS TAGS: tags-recursive $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) - tags=; \ + set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ @@ -446,39 +534,43 @@ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! 
-f $$subdir/TAGS || \ - tags="$$tags $$include_option=$$here/$$subdir/TAGS"; \ + set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ - $(AWK) ' { files[$$0] = 1; } \ - END { for (i in files) print i; }'`; \ - if test -z "$(ETAGS_ARGS)$$tags$$unique"; then :; else \ + $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ + END { if (nonempty) { for (i in files) print i; }; }'`; \ + shift; \ + if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ - $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ - $$tags $$unique; \ + if test $$# -gt 0; then \ + $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ + "$$@" $$unique; \ + else \ + $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ + $$unique; \ + fi; \ fi ctags: CTAGS CTAGS: ctags-recursive $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) - tags=; \ - here=`pwd`; \ list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ - $(AWK) ' { files[$$0] = 1; } \ - END { for (i in files) print i; }'`; \ - test -z "$(CTAGS_ARGS)$$tags$$unique" \ + $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ + END { if (nonempty) { for (i in files) print i; }; }'`; \ + test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ - $$tags $$unique + $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ - && cd $(top_srcdir) \ - && gtags -i $(GTAGS_ARGS) $$here + && $(am__cd) $(top_srcdir) \ + && gtags -i $(GTAGS_ARGS) "$$here" distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags @@ -499,29 +591,41 @@ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ + if test -d 
"$(distdir)/$$file"; then \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ + fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ - cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \ + cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ - cp -pR $$d/$$file $(distdir)$$dir || exit 1; \ + cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ - test -f $(distdir)/$$file \ - || cp -p $$d/$$file $(distdir)/$$file \ + test -f "$(distdir)/$$file" \ + || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done - list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ + @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ - test -d "$(distdir)/$$subdir" \ - || $(MKDIR_P) "$(distdir)/$$subdir" \ - || exit 1; \ - distdir=`$(am__cd) $(distdir) && pwd`; \ - top_distdir=`$(am__cd) $(top_distdir) && pwd`; \ - (cd $$subdir && \ + $(am__make_dryrun) \ + || test -d "$(distdir)/$$subdir" \ + || $(MKDIR_P) "$(distdir)/$$subdir" \ + || exit 1; \ + dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ + $(am__relativize); \ + new_distdir=$$reldir; \ + dir1=$$subdir; dir2="$(top_distdir)"; \ + $(am__relativize); \ + new_top_distdir=$$reldir; \ + echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ + echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ + ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ - top_distdir="$$top_distdir" \ - distdir="$$distdir/$$subdir" \ + top_distdir="$$new_top_distdir" \ + distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ + am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ @@ -544,10 +648,15 @@ installcheck: installcheck-recursive install-strip: - $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ - install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s 
\ - `test -z '$(STRIP)' || \ - echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install + if test -z '$(STRIP)'; then \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + install; \ + else \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ + fi mostlyclean-generic: clean-generic: @@ -555,6 +664,7 @@ distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) + -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @@ -573,6 +683,8 @@ html: html-recursive +html-am: + info: info-recursive info-am: @@ -580,21 +692,30 @@ install-data-am: install-initDATA install-launchdDATA @$(NORMAL_INSTALL) $(MAKE) $(AM_MAKEFLAGS) install-data-hook - install-dvi: install-dvi-recursive +install-dvi-am: + install-exec-am: install-nobase_sysconfDATA install-html: install-html-recursive +install-html-am: + install-info: install-info-recursive +install-info-am: + install-man: install-pdf: install-pdf-recursive +install-pdf-am: + install-ps: install-ps-recursive +install-ps-am: + installcheck-am: maintainer-clean: maintainer-clean-recursive @@ -616,8 +737,8 @@ uninstall-am: uninstall-initDATA uninstall-launchdDATA \ uninstall-nobase_sysconfDATA -.MAKE: $(RECURSIVE_CLEAN_TARGETS) $(RECURSIVE_TARGETS) install-am \ - install-data-am install-strip +.MAKE: $(RECURSIVE_CLEAN_TARGETS) $(RECURSIVE_TARGETS) ctags-recursive \ + install-am install-data-am install-strip tags-recursive .PHONY: $(RECURSIVE_CLEAN_TARGETS) $(RECURSIVE_TARGETS) CTAGS GTAGS \ all all-am check check-am clean clean-generic clean-libtool \ @@ -707,6 +828,7 @@ if test -n "$(init_DATA)"; then \ chmod +x "$(DESTDIR)$(initdir)/couchdb"; \ fi + 
# Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: diff -Nru couchdb-1.2.0/etc/windows/couchdb.iss.tpl couchdb-1.4.0~rc.1/etc/windows/couchdb.iss.tpl --- couchdb-1.2.0/etc/windows/couchdb.iss.tpl 2012-03-28 15:34:12.000000000 -0400 +++ couchdb-1.4.0~rc.1/etc/windows/couchdb.iss.tpl 2013-08-23 10:57:21.000000000 -0400 @@ -36,6 +36,12 @@ Source: "%locallibbindir%\..\releases\*.*"; DestDir: "{app}\releases"; Flags: ignoreversion uninsrestartdelete restartreplace recursesubdirs ; skip ./usr, ./var +; These are erlang requirements and not copied by our makefiles. +; From R14B01 onwards OTP may be built with a static OpenSSL so +; thse DLLs are now optional. +Source: "%openssl_bin_dir%\ssleay32.dll"; DestDir: "{app}\bin"; Flags: ignoreversion uninsrestartdelete restartreplace skipifsourcedoesntexist +Source: "%openssl_bin_dir%\libeay32.dll"; DestDir: "{app}\bin"; Flags: ignoreversion uninsrestartdelete restartreplace skipifsourcedoesntexist + ; custom stuff... ; ./etc/default.ini is unconditional Source: "%locallibbindir%\..\etc\couchdb\default.ini"; DestDir: "{app}\etc\couchdb"; Flags: ignoreversion uninsrestartdelete restartreplace @@ -48,10 +54,6 @@ ; ( deleteafterinstall - not needed - {tmp} auto cleaned???? Source: "%msvc_redist_dir%\%msvc_redist_name%"; DestDir: "{tmp}"; Flags: deleteafterinstall -; These are erlang requirements and not copied by our makefiles. 
-Source: "%openssl_bin_dir%\ssleay32.dll"; DestDir: "{app}\bin"; Flags: ignoreversion uninsrestartdelete restartreplace -Source: "%openssl_bin_dir%\libeay32.dll"; DestDir: "{app}\bin"; Flags: ignoreversion uninsrestartdelete restartreplace - [Dirs] Name: "{app}\var\lib\couchdb"; Permissions: authusers-modify Name: "{app}\var\log\couchdb"; Permissions: authusers-modify @@ -62,6 +64,8 @@ Name: "{group}\Start CouchDB"; Filename: "{app}\bin\couchdb.bat" Name: "{group}\Futon (CouchDB web interface)"; Filename: "http://127.0.0.1:5984/_utils" Name: "{group}\CouchDB Web Site"; Filename: "http://couchdb.apache.org/" +Name: "{group}\CouchDB Online Docs"; Filename: "http://docs.couchdb.org/" +Name: "{group}\CouchDB Embedded Docs"; Filename: "http://localhost:5984/_utils/docs/" [Tasks] Name: service; Description: "Install couchdb as a Windows service" diff -Nru couchdb-1.2.0/etc/windows/Makefile.in couchdb-1.4.0~rc.1/etc/windows/Makefile.in --- couchdb-1.2.0/etc/windows/Makefile.in 2012-03-29 17:05:38.000000000 -0400 +++ couchdb-1.4.0~rc.1/etc/windows/Makefile.in 2013-08-23 10:57:40.000000000 -0400 @@ -1,8 +1,9 @@ -# Makefile.in generated by automake 1.10 from Makefile.am. +# Makefile.in generated by automake 1.11.6 from Makefile.am. # @configure_input@ # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, -# 2003, 2004, 2005, 2006 Free Software Foundation, Inc. +# 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 Free Software +# Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. 
@@ -14,9 +15,27 @@ @SET_MAKE@ VPATH = @srcdir@ +am__make_dryrun = \ + { \ + am__dry=no; \ + case $$MAKEFLAGS in \ + *\\[\ \ ]*) \ + echo 'am--echo: ; @echo "AM" OK' | $(MAKE) -f - 2>/dev/null \ + | grep '^AM OK$$' >/dev/null || am__dry=yes;; \ + *) \ + for am__flg in $$MAKEFLAGS; do \ + case $$am__flg in \ + *=*|--*) ;; \ + *n*) am__dry=yes; break;; \ + esac; \ + done;; \ + esac; \ + test $$am__dry = yes; \ + } pkgdatadir = $(datadir)/@PACKAGE@ -pkglibdir = $(libdir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ +pkglibdir = $(libdir)/@PACKAGE@ +pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c @@ -34,20 +53,24 @@ subdir = etc/windows DIST_COMMON = $(srcdir)/Makefile.am $(srcdir)/Makefile.in ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 -am__aclocal_m4_deps = $(top_srcdir)/m4/ac_check_curl.m4 \ - $(top_srcdir)/m4/ac_check_icu.m4 $(top_srcdir)/m4/libtool.m4 \ +am__aclocal_m4_deps = $(top_srcdir)/m4/libtool.m4 \ $(top_srcdir)/m4/ltoptions.m4 $(top_srcdir)/m4/ltsugar.m4 \ $(top_srcdir)/m4/ltversion.m4 $(top_srcdir)/m4/lt~obsolete.m4 \ - $(top_srcdir)/m4/pkg.m4 $(top_srcdir)/acinclude.m4 \ - $(top_srcdir)/configure.ac + $(top_srcdir)/acinclude.m4 $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h \ $(top_builddir)/src/snappy/google-snappy/config.h CONFIG_CLEAN_FILES = +CONFIG_CLEAN_VPATH_FILES = SOURCES = DIST_SOURCES = +am__can_run_installinfo = \ + case $$AM_UPDATE_INFO_DIR in \ + n|no|NO) false;; \ + *) (install-info --version) >/dev/null 2>&1;; \ + esac DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ @@ -87,7 +110,11 @@ FGREP = @FGREP@ FLAGS = @FLAGS@ GREP = @GREP@ -HELP2MAN_EXECUTABLE = @HELP2MAN_EXECUTABLE@ +HAS_HELP2MAN = @HAS_HELP2MAN@ 
+HAS_INSTALLINFO = @HAS_INSTALLINFO@ +HAS_MAKEINFO = @HAS_MAKEINFO@ +HAS_PDFLATEX = @HAS_PDFLATEX@ +HAS_SPHINX_BUILD = @HAS_SPHINX_BUILD@ ICU_BIN = @ICU_BIN@ ICU_CFLAGS = @ICU_CFLAGS@ ICU_CONFIG = @ICU_CONFIG@ @@ -100,10 +127,7 @@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ -JS185_CFLAGS = @JS185_CFLAGS@ -JS185_LIBS = @JS185_LIBS@ JS_CFLAGS = @JS_CFLAGS@ -JS_LDFLAGS = @JS_LDFLAGS@ JS_LIBS = @JS_LIBS@ JS_LIB_BINARY = @JS_LIB_BINARY@ LD = @LD@ @@ -115,6 +139,7 @@ LN_S = @LN_S@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ +MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ NM = @NM@ NMEDIT = @NMEDIT@ @@ -127,6 +152,7 @@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ +PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PKG_CONFIG = @PKG_CONFIG@ @@ -145,6 +171,7 @@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ +ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ @@ -213,6 +240,7 @@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ +top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ version = @version@ @@ -228,14 +256,14 @@ @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ - cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh \ - && exit 0; \ + ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ + && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ - echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign etc/windows/Makefile'; \ - cd $(top_srcdir) && \ - $(AUTOMAKE) --foreign etc/windows/Makefile + echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign etc/windows/Makefile'; \ + $(am__cd) $(top_srcdir) && \ + $(AUTOMAKE) --foreign etc/windows/Makefile .PRECIOUS: Makefile Makefile: 
$(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ @@ -253,6 +281,7 @@ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh +$(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo @@ -282,13 +311,17 @@ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ + if test -d "$(distdir)/$$file"; then \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ + fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ - cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \ + cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ - cp -pR $$d/$$file $(distdir)$$dir || exit 1; \ + cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ - test -f $(distdir)/$$file \ - || cp -p $$d/$$file $(distdir)/$$file \ + test -f "$(distdir)/$$file" \ + || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @@ -306,16 +339,22 @@ installcheck: installcheck-am install-strip: - $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ - install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ - `test -z '$(STRIP)' || \ - echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install + if test -z '$(STRIP)'; then \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + install; \ + else \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ + fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) + -test . 
= "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @@ -334,6 +373,8 @@ html: html-am +html-am: + info: info-am info-am: @@ -342,18 +383,28 @@ install-dvi: install-dvi-am +install-dvi-am: + install-exec-am: install-html: install-html-am +install-html-am: + install-info: install-info-am +install-info-am: + install-man: install-pdf: install-pdf-am +install-pdf-am: + install-ps: install-ps-am +install-ps-am: + installcheck-am: maintainer-clean: maintainer-clean-am @@ -387,6 +438,7 @@ maintainer-clean-generic mostlyclean mostlyclean-generic \ mostlyclean-libtool pdf pdf-am ps ps-am uninstall uninstall-am + # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: diff -Nru couchdb-1.2.0/INSTALL couchdb-1.4.0~rc.1/INSTALL --- couchdb-1.2.0/INSTALL 2011-11-03 17:56:27.000000000 -0400 +++ couchdb-1.4.0~rc.1/INSTALL 2012-09-03 11:10:15.000000000 -0400 @@ -1,19 +1,25 @@ Installation Instructions ************************* -Copyright (C) 1994, 1995, 1996, 1999, 2000, 2001, 2002, 2004, 2005, -2006 Free Software Foundation, Inc. +Copyright (C) 1994-1996, 1999-2002, 2004-2011 Free Software Foundation, +Inc. -This file is free documentation; the Free Software Foundation gives -unlimited permission to copy, distribute and modify it. + Copying and distribution of this file, with or without modification, +are permitted in any medium without royalty provided the copyright +notice and this notice are preserved. This file is offered as-is, +without warranty of any kind. Basic Installation ================== -Briefly, the shell commands `./configure; make; make install' should + Briefly, the shell commands `./configure; make; make install' should configure, build, and install this package. 
The following more-detailed instructions are generic; see the `README' file for -instructions specific to this package. +instructions specific to this package. Some packages provide this +`INSTALL' file but do not implement all of the features documented +below. The lack of an optional feature in a given package is not +necessarily a bug. More recommendations for GNU packages can be found +in *note Makefile Conventions: (standards)Makefile Conventions. The `configure' shell script attempts to guess correct values for various system-dependent variables used during compilation. It uses @@ -42,7 +48,7 @@ you want to change it or regenerate `configure' using a newer version of `autoconf'. -The simplest way to compile this package is: + The simplest way to compile this package is: 1. `cd' to the directory containing the package's source code and type `./configure' to configure the package for your system. @@ -53,12 +59,22 @@ 2. Type `make' to compile the package. 3. Optionally, type `make check' to run any self-tests that come with - the package. + the package, generally using the just-built uninstalled binaries. 4. Type `make install' to install the programs and any data files and - documentation. + documentation. When installing into a prefix owned by root, it is + recommended that the package be configured and built as a regular + user, and only the `make install' phase executed with root + privileges. + + 5. Optionally, type `make installcheck' to repeat any self-tests, but + this time using the binaries in their final installed location. + This target does not install anything. Running this target as a + regular user, particularly if the prior `make install' required + root privileges, verifies that the installation completed + correctly. - 5. You can remove the program binaries and object files from the + 6. You can remove the program binaries and object files from the source code directory by typing `make clean'. 
To also remove the files that `configure' created (so you can compile the package for a different kind of computer), type `make distclean'. There is @@ -67,12 +83,22 @@ all sorts of other programs in order to regenerate files that came with the distribution. + 7. Often, you can also type `make uninstall' to remove the installed + files again. In practice, not all packages have tested that + uninstallation works correctly, even though it is required by the + GNU Coding Standards. + + 8. Some packages, particularly those that use Automake, provide `make + distcheck', which can by used by developers to test that all other + targets like `make install' and `make uninstall' work correctly. + This target is generally not run by end users. + Compilers and Options ===================== -Some systems require unusual options for compilation or linking that the -`configure' script does not know about. Run `./configure --help' for -details on some of the pertinent environment variables. + Some systems require unusual options for compilation or linking that +the `configure' script does not know about. Run `./configure --help' +for details on some of the pertinent environment variables. You can give `configure' initial values for configuration parameters by setting variables in the command line or in the environment. Here @@ -85,25 +111,41 @@ Compiling For Multiple Architectures ==================================== -You can compile the package for more than one kind of computer at the + You can compile the package for more than one kind of computer at the same time, by placing the object files for each architecture in their own directory. To do this, you can use GNU `make'. `cd' to the directory where you want the object files and executables to go and run the `configure' script. `configure' automatically checks for the -source code in the directory that `configure' is in and in `..'. +source code in the directory that `configure' is in and in `..'. 
This +is known as a "VPATH" build. With a non-GNU `make', it is safer to compile the package for one architecture at a time in the source code directory. After you have installed the package for one architecture, use `make distclean' before reconfiguring for another architecture. + On MacOS X 10.5 and later systems, you can create libraries and +executables that work on multiple system types--known as "fat" or +"universal" binaries--by specifying multiple `-arch' options to the +compiler but only a single `-arch' option to the preprocessor. Like +this: + + ./configure CC="gcc -arch i386 -arch x86_64 -arch ppc -arch ppc64" \ + CXX="g++ -arch i386 -arch x86_64 -arch ppc -arch ppc64" \ + CPP="gcc -E" CXXCPP="g++ -E" + + This is not guaranteed to produce working output in all cases, you +may have to build one architecture at a time and combine the results +using the `lipo' tool if you have problems. + Installation Names ================== -By default, `make install' installs the package's commands under + By default, `make install' installs the package's commands under `/usr/local/bin', include files under `/usr/local/include', etc. You can specify an installation prefix other than `/usr/local' by giving -`configure' the option `--prefix=PREFIX'. +`configure' the option `--prefix=PREFIX', where PREFIX must be an +absolute file name. You can specify separate installation prefixes for architecture-specific files and architecture-independent files. If you @@ -114,16 +156,47 @@ In addition, if you use an unusual directory layout you can give options like `--bindir=DIR' to specify different values for particular kinds of files. Run `configure --help' for a list of the directories -you can set and what kinds of files go in them. +you can set and what kinds of files go in them. 
In general, the +default for these options is expressed in terms of `${prefix}', so that +specifying just `--prefix' will affect all of the other directory +specifications that were not explicitly provided. + + The most portable way to affect installation locations is to pass the +correct locations to `configure'; however, many packages provide one or +both of the following shortcuts of passing variable assignments to the +`make install' command line to change installation locations without +having to reconfigure or recompile. + + The first method involves providing an override variable for each +affected directory. For example, `make install +prefix=/alternate/directory' will choose an alternate location for all +directory configuration variables that were expressed in terms of +`${prefix}'. Any directories that were specified during `configure', +but not in terms of `${prefix}', must each be overridden at install +time for the entire installation to be relocated. The approach of +makefile variable overrides for each directory variable is required by +the GNU Coding Standards, and ideally causes no recompilation. +However, some platforms have known limitations with the semantics of +shared libraries that end up requiring recompilation when using this +method, particularly noticeable in packages that use GNU Libtool. + + The second method involves providing the `DESTDIR' variable. For +example, `make install DESTDIR=/alternate/directory' will prepend +`/alternate/directory' before all installation names. The approach of +`DESTDIR' overrides is not required by the GNU Coding Standards, and +does not work on platforms that have drive letters. On the other hand, +it does better at avoiding recompilation issues, and works well even +when some directory options were not specified in terms of `${prefix}' +at `configure' time. 
+ +Optional Features +================= If the package supports it, you can cause programs to be installed with an extra prefix or suffix on their names by giving `configure' the option `--program-prefix=PREFIX' or `--program-suffix=SUFFIX'. -Optional Features -================= - -Some packages pay attention to `--enable-FEATURE' options to + Some packages pay attention to `--enable-FEATURE' options to `configure', where FEATURE indicates an optional part of the package. They may also pay attention to `--with-PACKAGE' options, where PACKAGE is something like `gnu-as' or `x' (for the X Window System). The @@ -135,14 +208,58 @@ you can use the `configure' options `--x-includes=DIR' and `--x-libraries=DIR' to specify their locations. + Some packages offer the ability to configure how verbose the +execution of `make' will be. For these packages, running `./configure +--enable-silent-rules' sets the default to minimal output, which can be +overridden with `make V=1'; while running `./configure +--disable-silent-rules' sets the default to verbose, which can be +overridden with `make V=0'. + +Particular systems +================== + + On HP-UX, the default C compiler is not ANSI C compatible. If GNU +CC is not installed, it is recommended to use the following options in +order to use an ANSI C compiler: + + ./configure CC="cc -Ae -D_XOPEN_SOURCE=500" + +and if that doesn't work, install pre-built binaries of GCC for HP-UX. + + HP-UX `make' updates targets which have the same time stamps as +their prerequisites, which makes it generally unusable when shipped +generated files such as `configure' are involved. Use GNU `make' +instead. + + On OSF/1 a.k.a. Tru64, some versions of the default C compiler cannot +parse its `' header file. The option `-nodtk' can be used as +a workaround. 
If GNU CC is not installed, it is therefore recommended +to try + + ./configure CC="cc" + +and if that doesn't work, try + + ./configure CC="cc -nodtk" + + On Solaris, don't put `/usr/ucb' early in your `PATH'. This +directory contains several dysfunctional programs; working variants of +these programs are available in `/usr/bin'. So, if you need `/usr/ucb' +in your `PATH', put it _after_ `/usr/bin'. + + On Haiku, software installed for all users goes in `/boot/common', +not `/usr/local'. It is recommended to use the following options: + + ./configure --prefix=/boot/common + Specifying the System Type ========================== -There may be some features `configure' cannot figure out automatically, -but needs to determine by the type of machine the package will run on. -Usually, assuming the package is built to be run on the _same_ -architectures, `configure' can figure that out, but if it prints a -message saying it cannot guess the machine type, give it the + There may be some features `configure' cannot figure out +automatically, but needs to determine by the type of machine the package +will run on. Usually, assuming the package is built to be run on the +_same_ architectures, `configure' can figure that out, but if it prints +a message saying it cannot guess the machine type, give it the `--build=TYPE' option. TYPE can either be a short name for the system type, such as `sun4', or a canonical name which has the form: @@ -150,7 +267,8 @@ where SYSTEM can have one of these forms: - OS KERNEL-OS + OS + KERNEL-OS See the file `config.sub' for the possible values of each field. If `config.sub' isn't included in this package, then this package doesn't @@ -168,9 +286,9 @@ Sharing Defaults ================ -If you want to set default values for `configure' scripts to share, you -can create a site shell script called `config.site' that gives default -values for variables like `CC', `cache_file', and `prefix'. 
+ If you want to set default values for `configure' scripts to share, +you can create a site shell script called `config.site' that gives +default values for variables like `CC', `cache_file', and `prefix'. `configure' looks for `PREFIX/share/config.site' if it exists, then `PREFIX/etc/config.site' if it exists. Or, you can set the `CONFIG_SITE' environment variable to the location of the site script. @@ -179,7 +297,7 @@ Defining Variables ================== -Variables not defined in a site shell script can be set in the + Variables not defined in a site shell script can be set in the environment passed to `configure'. However, some packages may run configure again during the build, and the customized values of these variables may be lost. In order to avoid this problem, you should set @@ -198,11 +316,19 @@ `configure' Invocation ====================== -`configure' recognizes the following options to control how it operates. + `configure' recognizes the following options to control how it +operates. `--help' `-h' - Print a summary of the options to `configure', and exit. + Print a summary of all of the options to `configure', and exit. + +`--help=short' +`--help=recursive' + Print a summary of the options unique to this package's + `configure', and exit. The `short' variant lists options used + only in the top level, while the `recursive' variant lists options + also present in any nested packages. `--version' `-V' @@ -229,6 +355,16 @@ Look for the package's source code in directory DIR. Usually `configure' can determine that directory automatically. +`--prefix=DIR' + Use DIR as the installation prefix. *note Installation Names:: + for more details, including other options available for fine-tuning + the installation locations. + +`--no-create' +`-n' + Run the configure checks, but stop before creating any output + files. + `configure' also accepts some other, not widely useful, options. Run `configure --help' for more details. 
diff -Nru couchdb-1.2.0/INSTALL.Unix couchdb-1.4.0~rc.1/INSTALL.Unix --- couchdb-1.2.0/INSTALL.Unix 2012-03-28 15:34:12.000000000 -0400 +++ couchdb-1.4.0~rc.1/INSTALL.Unix 2013-08-23 10:57:21.000000000 -0400 @@ -3,6 +3,16 @@ A high-level guide to Unix-like systems, inc. Mac OS X and Ubuntu. +Community installation guides are available on the wiki: + + http://wiki.apache.org/couchdb/Installation + +This document is the canonical source of installation information. However, many +systems have gotchas that you need to be aware of. In addition, dependencies +frequently change as distributions update their archives. If you're running into +trouble, be sure to check out the wiki. If you have any tips to share, please +also update the wiki so that others can benefit from your experience. + Troubleshooting --------------- @@ -25,69 +35,91 @@ You should have the following installed: - * Erlang OTP (>=R12B5) (http://erlang.org/) - * ICU (http://icu.sourceforge.net/) - * OpenSSL (http://www.openssl.org/) - * Mozilla SpiderMonkey (1.7) (http://www.mozilla.org/js/spidermonkey/) - * GNU Make (http://www.gnu.org/software/make/) - * GNU Compiler Collection (http://gcc.gnu.org/) - * libcurl (http://curl.haxx.se/libcurl/) - * help2man (http://www.gnu.org/s/help2man/) + * Erlang OTP (>=R13B04, =2.7) for docs (http://python.org/) + * Python Sphinx (>=1.1.3) (http://pypi.python.org/pypi/Sphinx) -It is recommended that you install Erlang OTP R12B-5 or above where possible. +It is recommended that you install Erlang OTP R13B-4 or above where possible. You will only need libcurl if you plan to run the JavaScript test suite. And help2man is only need if you plan on installing the CouchDB man pages. - -Ubuntu -~~~~~~ - -For up to date instructions, please see: - - http://wiki.apache.org/couchdb/Installing_on_Ubuntu - -Unfortunately, it seems that installing dependancies on Ubuntu is troublesome. +Python and Sphinx are only required for building the online documentation. 
Debian-based Systems ~~~~~~~~~~~~~~~~~~~~ -You can install the build tools by running: +You can install the dependencies by running: sudo apt-get install build-essential - -You can install the other dependencies by running: - - sudo apt-get install erlang libicu-dev libmozjs-dev libcurl4-openssl-dev + sudo apt-get install erlang-base-hipe + sudo apt-get install erlang-dev + sudo apt-get install erlang-manpages + sudo apt-get install erlang-eunit + sudo apt-get install erlang-nox + sudo apt-get install libicu-dev + sudo apt-get install libmozjs-dev + sudo apt-get install libcurl4-openssl-dev + sudo apt-get install pkg-config + +There are lots of Erlang packages. If there is a problem with your install, try +a different mix. There is more information on the wiki. Additionally, you might +want to install some of the optional Erlang tools which may also be useful. Be sure to update the version numbers to match your system's available packages. -Mac OS X -~~~~~~~~ +For up to date instructions, please see: -You can install the build tools by running: + http://wiki.apache.org/couchdb/Installing_on_Debian - open /Applications/Installers/Xcode\ Tools/XcodeTools.mpkg + http://wiki.apache.org/couchdb/Installing_on_Ubuntu -You can install the other dependencies by running: +Unfortunately, it seems that installing dependencies on Ubuntu is troublesome. 
- brew install erlang icu4c spidermonkey curl +RedHat-based (Fedora, Centos, RHEL) Systems +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -You may want to link ICU so that CouchDB can find the header files automatically: +You can install the dependencies by running: - brew link icu4c + sudo yum groupinstall "Development Tools" + sudo yum install autoconf + sudo yum install autoconf-archive + sudo yum install automake + sudo yum install libtool + sudo yum install perl-Test-Harness + sudo yum install erlang-etap + sudo yum install erlang-erts + sudo yum install erlang-os_mon + sudo yum install erlang-eunit + sudo yum install libicu-devel + sudo yum install js-devel + sudo yum install curl-devel + sudo yum install pkg-config -The same is true for recent versions of Erlang: +While CouchDB builds against the default js-devel-1.7.0 included in some +distributions, it's recommended to use a more recent js-devel-1.8.5. - brew link erlang +Mac OS X +~~~~~~~~ + +To build CouchDB from source on Mac OS X, you will need to install Xcode. -If you are upgrading your version of CouchDB and have an older nspr and -Spidermonkey installed you may encounter an error during the ./configure step -below. This is generally due to nspr being installed without its pkg-config -description. To fix the issue: +You can install the other dependencies by running: - brew remove --force spidermonkey - brew remove --force nspr - brew update + brew install autoconf + brew install autoconf-archive + brew install automake + brew install libtool + brew install erlang + brew install icu4c brew install spidermonkey + brew install curl + brew install pkg-config You will need Homebrew installed to use the `brew` command. 
@@ -196,6 +228,10 @@ chmod 0770 /usr/local/var/log/couchdb chmod 0770 /usr/local/var/run/couchdb +Update the permissions for your `default.ini` file: + + chmod 0644 /usr/local/etc/couchdb/default.ini + Running as a Daemon ------------------- diff -Nru couchdb-1.2.0/INSTALL.Windows couchdb-1.4.0~rc.1/INSTALL.Windows --- couchdb-1.2.0/INSTALL.Windows 2012-03-28 15:34:12.000000000 -0400 +++ couchdb-1.4.0~rc.1/INSTALL.Windows 2013-08-23 10:57:21.000000000 -0400 @@ -3,33 +3,63 @@ For a high-level guide to Microsoft Windows. +Troubleshooting +--------------- + +There is a troubleshooting guide: + + http://wiki.apache.org/couchdb/Troubleshooting + +There is a wiki for general documentation: + + http://wiki.apache.org/couchdb/ + +And some Windows-specific tips: + + http://wiki.apache.org/couchdb/Quirks_on_Windows + +There are collection of friendly mailing lists: + + http://couchdb.apache.org/community/lists.html + +Please work through these in order if you experience any problems. + Dependencies ------------ You will need the following installed: - * Erlang OTP (=14B01) (http://erlang.org/) - * ICU (=4.4.*) (http://icu.sourceforge.net/) - * OpenSSL (http://www.openssl.org/) - * Mozilla SpiderMonkey (1.7) (http://www.mozilla.org/js/spidermonkey/) - * libcurl (http://curl.haxx.se/libcurl/) - * Cygwin (http://www.cygwin.com/) - * Visual Studio 2008 (http://msdn.microsoft.com/en-gb/vstudio/default.aspx) + * Erlang OTP (>=14B01, =4.*) (http://icu-project.org/) + * OpenSSL (>=0.9.8r) (http://www.openssl.org/) + * Mozilla SpiderMonkey (=1.8.5) (http://www.mozilla.org/js/spidermonkey/) + * libcurl (>=7.20) (http://curl.haxx.se/libcurl/) + * Cygwin (http://www.cygwin.com/) + * Microsoft SDK 7.0 or 7.1 (http://www.microsoft.com/en-us/download/details.aspx?id=8279) + * Python (>= 2.68) for docs (http://python.org/) + * Python Sphinx (>=1.1.3) (http://pypi.python.org/pypi/Sphinx) General Notes ------------- + * When installing Cygwin, be sure to select all the `development` tools. 
+ * When installing Erlang, you must build it from source. - The CouchDB build requires a number of the Erlang build scripts. + * The CouchDB build requires a number of the Erlang build scripts. - * When installing ICU, select the binaries built with Visual Studio 2008. + * All dependent libraries should be built with the same version of + microsoft SDK. - * When installing Cygwin, be sure to select all the `development` tools. + * Do not try to link against libraries built with, or included in, + Cygwin or MingW. They are not compatible with the Erlang/OTP or CouchDB + build scripts. - * When installing libcurl, be sure to install by hand. + * ICU version 4.6 and later will build cleanly using MSBuild. - The Cygwin binaries are incompatible and will not work with Erlang. + * Python and Sphinx are optional for building the online documentation. + Use cygwin-provided Python and install Sphinx via easy_install or pip. + Further information is here http://pypi.python.org/pypi/setuptools#id4 Setting Up Cygwin ----------------- @@ -54,13 +84,16 @@ * The `which mt` command points to the Microsoft manifest tool. + * The `which nmake` command points to the Microsoft make tool. + If you do not do this, the build may fail due to Cygwin ones found in `/usr/bin` being used instead. Building Erlang --------------- -You must include Win32 OpenSSL. +You must include Win32 OpenSSL, built statically from source. Use +exactly the same version as required by the Erlang/OTP build process. However, you can skip the GUI tools by running: @@ -68,6 +101,8 @@ echo "skipping ic" > lib/ic/SKIP + echo "skipping jinterface" > lib/jinterface/SKIP + Follow the rest of the Erlang instructions as described. After running: @@ -76,9 +111,11 @@ You should run: - ./release/win32/Install.exe + ./release/win32/Install.exe -s -This will set up the release/win32/bin directory correctly. +This will set up the release/win32/bin directory correctly. 
The CouchDB +installation scripts currently write their data directly into this +location. To set up your environment for building CouchDB, run: @@ -94,7 +131,7 @@ To set up your path, run: - export PATH=$ERL_TOP/release/win32/erts-5.8.2/bin:$PATH + export PATH=$ERL_TOP/release/win32/erts-5.8.5/bin:$PATH If everything was successful, you should be ready to build CouchDB. @@ -103,6 +140,11 @@ Building CouchDB ---------------- +Note that `win32-curl` is only required if you wish to run the developer +tests. + +The documentation step may be skipped using `--disable-docs` if you wish. + Once you have satisfied the dependencies you should run: ./configure \ @@ -113,6 +155,8 @@ --with-win32-curl=/cygdrive/c/path/to/curl/root/directory \ --with-openssl-bin-dir=/cygdrive/c/openssl/bin \ --with-msvc-redist-dir=/cygdrive/c/dir/with/vcredist_platform_executable \ + --disable-init \ + --disable-launchd \ --prefix=$ERL_TOP/release/win32 This command could take a while to complete. @@ -133,6 +177,13 @@ Relax. +To build the .exe installer package, you should run: + + make dist + +Alternatively, you may run CouchDB directly from the build tree, but +to avoid any contamination do not run `make dist` after this. + First Run --------- @@ -150,4 +201,4 @@ http://127.0.0.1:5984/_utils/index.html -From here you should run the test suite in either Firefox 3.6+ or Safari 4+. +From here you should run the verification tests in Firefox. diff -Nru couchdb-1.2.0/LICENSE couchdb-1.4.0~rc.1/LICENSE --- couchdb-1.2.0/LICENSE 2012-03-28 15:34:12.000000000 -0400 +++ couchdb-1.4.0~rc.1/LICENSE 2013-08-23 10:57:21.000000000 -0400 @@ -201,24 +201,125 @@ See the License for the specific language governing permissions and limitations under the License. - Apache CouchDB Subcomponents The Apache CouchDB project includes a number of subcomponents with separate copyright notices and license terms. 
Your use of the code for the these subcomponents is subject to the terms and conditions of the following licenses. -For the m4/ac_check_icu.m4 component: +For the share/doc/build/html/_static components: + + Copyright (c) 2007-2011 by the Sphinx team (see AUTHORS file). + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +For the share/doc/build/html/_static/jquery.js component: + + Copyright 2010, John Resig + + Copyright 2010, The Dojo Foundation + + Copyright 2012 jQuery Foundation and other contributors + http://jquery.com/ - Copyright (c) 2005 Akos Maroy + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: - Copying and distribution of this file, with or without modification, are - permitted in any medium without royalty provided the copyright notice - and this notice are preserved. + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +For the share/doc/build/html/_static/underscore.js component: + + Copyright (c) 2009-2012 Jeremy Ashkenas, DocumentCloud + + Permission is hereby granted, free of charge, to any person + obtaining a copy of this software and associated documentation + files (the "Software"), to deal in the Software without + restriction, including without limitation the rights to use, + copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following + conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + OTHER DEALINGS IN THE SOFTWARE. + +For the share/doc/static/rtd.css component: + + Copyright (c) 2007-2011 by the Sphinx team (see AUTHORS file). + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. 
+ + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. For the share/www/script/jquery.js component: - Copyright (c) 2009 John Resig, http://jquery.com/ + Copyright 2012 jQuery Foundation and other contributors + http://jquery.com/ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the @@ -239,8 +340,7 @@ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -For the share/www/script/jquery-ui-1.8.11.custom.min.js and - share/www/style/jquery-ui-1.8.11.custom.css components: +For the share/www/script/jquery-ui-* components: Copyright (c) 2011 Paul Bakaus, http://jqueryui.com/ @@ -269,6 +369,7 @@ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. For the share/www/script/jquery.form.js component: +For the src/fauxton/assets/js/plugins/jquery.form.js component: http://malsup.com/jquery/form/ @@ -350,7 +451,7 @@ For the src/erlang-oauth component: - Copyright (c) 2008-2009 Tim Fletcher + Copyright the authors and contributors. All rights reserved. 
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation @@ -503,3 +604,379 @@ WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +For src/fauxton/apps/modules/pouchdb + + Copyright (c) 2012 Dale Harvey et al + + Licensed under the Apache License, Version 2.0 (the "License"); you may not + use this file except in compliance with the License. You may obtain a copy of + the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + License for the specific language governing permissions and limitations under + the License. + +For src/fauxton/assets/js/libs/almond.js + + Copyright (c) 2010-2011, The Dojo Foundation + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of the Dojo Foundation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND + ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +For src/fauxton/assets/js/libs/backbone.js + + Copyright (c) 2010-2013 Jeremy Ashkenas, DocumentCloud + + Permission is hereby granted, free of charge, to any person + obtaining a copy of this software and associated documentation + files (the "Software"), to deal in the Software without + restriction, including without limitation the rights to use, + copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following + conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + OTHER DEALINGS IN THE SOFTWARE. 
+ +For src/fauxton/assets/js/libs/bootstrap.js +And for src/fauxton/assets/less/bootstrap + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +For src/fauxton/assets/less/bootstrap + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +For src/fauxton/assets/js/libs/d3.js + + Copyright (c) 2012, Michael Bostock + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + * The name Michael Bostock may not be used to endorse or promote products + derived from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL MICHAEL BOSTOCK BE LIABLE FOR ANY DIRECT, + INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, + BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY + OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, + EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +For src/fauxton/assets/js/libs/jshint.js + + Copyright 2012 Anton Kovalyov (http://jshint.com) + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +For src/fauxton/assets/js/libs/lodash.js + + Copyright 2012-2013 The Dojo Foundation + Based on Underscore.js 1.4.3, copyright 2009-2013 Jeremy Ashkenas, + DocumentCloud Inc. + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. 
+ + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +For src/fauxton/assets/js/libs/nv.d3.js + + Copyright (c) 2011, 2012 [Novus Partners, Inc.][novus] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + [novus]: https://www.novus.com/ + +For src/fauxton/assets/js/libs/require.js + + Copyright (c) 2010-2011, The Dojo Foundation + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of the Dojo Foundation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. 
+ + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND + ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +For src/fauxton/assets/js/plugins/backbone.layoutmanager.js + + Copyright (c) 2013 Tim Branyen + + Permission is hereby granted, free of charge, to any person obtaining a copy of + this software and associated documentation files (the "Software"), to deal in + the Software without restriction, including without limitation the rights to + use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies + of the Software, and to permit persons to whom the Software is furnished to do + so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. 
+ +For src/fauxton/assets/js/plugins/prettify.js + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +For src/fauxton/assets/js/libs/codemirror.js + + Copyright (C) 2013 by Marijn Haverbeke + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. + + Please note that some subdirectories of the CodeMirror distribution + include their own LICENSE files, and are released under different + licences. 
+ +For src/fauxton/assets/js/plugins/codemirror-javascript.js + + Copyright (C) 2013 by Marijn Haverbeke + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. + + Please note that some subdirectories of the CodeMirror distribution + include their own LICENSE files, and are released under different + licences. + +For the src/couch_dbupdates component + + 2009-2012 (c) Benoît Chesneau + + Apache 2 License, see above. 
+ +For src/fauxton/test/mocha/mocha.js and src/fauxton/test/mocha/mocha.js + + Copyright (c) 2011-2013 TJ Holowaychuk + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + 'Software'), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, + TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE + SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +For src/fauxton/test/mocha/chai.js + + Copyright (c) 2011-2013 Jake Luer jake@alogicalparadox.com + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + 'Software'), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. 
+ + THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. + IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, + TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE + SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +for src/fauxton/test/mocha/sinon-chai.js + + DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE + Version 2, December 2004 + + Copyright © 2012–2013 Domenic Denicola + + Everyone is permitted to copy and distribute verbatim or modified + copies of this license document, and changing it is allowed as long + as the name is changed. + + DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. You just DO WHAT THE FUCK YOU WANT TO. + +for src/fauxton/js/libs/spin.js + The MIT License + + Copyright (c) 2011 Felix Gnass [fgnass at neteye dot de] + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. diff -Nru couchdb-1.2.0/license.skip couchdb-1.4.0~rc.1/license.skip --- couchdb-1.2.0/license.skip 2012-03-28 15:34:12.000000000 -0400 +++ couchdb-1.4.0~rc.1/license.skip 2013-08-23 10:57:21.000000000 -0400 @@ -1,4 +1,4 @@ -\.svn +^.*.DS_Store ^AUTHORS ^BUGS ^CHANGES @@ -18,26 +18,26 @@ ^README ^THANKS ^aclocal.m4 -^apache-couchdb-* -^autom4te.cache/* +^apache-couchdb-.* +^autom4te.cache/.* ^bin/Makefile ^bin/Makefile.in ^bin/couchdb.1 ^bin/couchjs.1 -^build-aux/* -^config.* +^build-aux/.* +^config..* ^configure ^couchdb.stderr ^couchdb.stdout -^cover/.*\.coverdata -^cover/.*\.html +^cover/.*.coverdata +^cover/.*.html ^erl_crash.dump ^etc/Makefile ^etc/Makefile.in ^etc/couchdb/Makefile ^etc/couchdb/Makefile.in -^etc/couchdb/default* -^etc/couchdb/local* +^etc/couchdb/default.* +^etc/couchdb/local.* ^etc/default/Makefile ^etc/default/Makefile.in ^etc/default/couchdb @@ -45,79 +45,102 @@ ^etc/init/Makefile.in ^etc/launchd/Makefile ^etc/launchd/Makefile.in -^etc/launchd/org.apache.couchdb.plist.* +^etc/launchd/org.apache.couchdb.plist ^etc/logrotate.d/Makefile ^etc/logrotate.d/Makefile.in -^etc/logrotate.d/couchdb* +^etc/logrotate.d/couchdb.* ^etc/windows/Makefile ^etc/windows/README.txt.tpl ^libtool ^license.skip -^m4/* +^m4/.* ^share/Makefile ^share/Makefile.in +^share/doc/Makefile +^share/doc/Makefile.in +^share/doc/build/.* +^share/doc/images/.* +^share/doc/static/rtd.css ^share/server/json2.js ^share/server/mimeparse.js ^share/server/coffee-script.js ^share/www/favicon.ico -^share/www/image/* -^share/www/script/jquery.* +^share/www/image/.* +^share/www/script/jquery..* ^share/www/script/json2.js -^share/www/script/jspec/* +^share/www/script/jspec/.* ^share/www/script/sha1.js 
^share/www/script/base64.js -^share/www/script/test/lorem* +^share/www/script/test/lorem.* ^share/www/style/jquery-ui-1.8.11.custom.css ^src/Makefile ^src/Makefile.in ^src/couch_index/Makefile ^src/couch_index/Makefile.in -^src/couch_index/ebin/.*beam +^src/couch_index/ebin/.*.beam ^src/couch_mrview/Makefile ^src/couch_mrview/Makefile.in -^src/couch_mrview/ebin/.*beam +^src/couch_mrview/ebin/.*.beam ^src/couch_replicator/Makefile ^src/couch_replicator/Makefile.in -^src/couch_replicator/ebin/.*beam -^src/couchdb/.*beam -^src/couchdb/.deps/* +^src/couch_replicator/ebin/.*.beam +^src/couchdb/.*.beam +^src/couchdb/.deps/.* ^src/couchdb/Makefile ^src/couchdb/Makefile.in -^src/couchdb/couch.app* -^src/couchdb/couch.app.tpl.in -^src/couchdb/priv/.*o -^src/couchdb/priv/.deps/* +^src/couchdb/couch.app.* +^src/couchdb/priv/.*.o +^src/couchdb/priv/.deps/.* ^src/couchdb/priv/Makefile ^src/couchdb/priv/Makefile.in ^src/couchdb/priv/couch_icu_driver.la ^src/couchdb/priv/couchjs ^src/couchdb/priv/couchspawnkillable ^src/couchdb/priv/stat_descriptions.cfg -^src/erlang-oauth/* -^src/ejson/* -^src/etap/* -^src/ibrowse/* -^src/mochiweb/* -^src/snappy/* +^src/erlang-oauth/.* +^src/couch_dbupdates +^src/ejson/.* +^src/etap/.* +^src/fauxton/app/app.js +^src/fauxton/app/config.js +^src/fauxton/app/main.js +^src/fauxton/app/modules/pouchdb/* +^src/fauxton/assets/.* +^src/fauxton/couchapp.js +^src/fauxton/favicon.ico +^src/fauxton/package.json +^src/fauxton/readme.md +^src/fauxton/writing_addons.md +^src/fauxton/TODO.md +^src/fauxton/settings.json.* +^src/fauxton/test/.* +^src/fauxton/tasks/addon/rename.json +^src/fauxton/app/addons/activetasks/assets/less/activetasks.less +^src/fauxton/app/addons/auth/assets/less/auth.less +^src/ibrowse/.* +^src/mochiweb/.* +^src/snappy/.* ^stamp-h1 ^test/Makefile ^test/Makefile.in ^test/bench/Makefile ^test/bench/Makefile.in -^test/etap/.*beam -^test/etap/.*\.o -^test/etap/.deps/* +^test/etap/.*.beam +^test/etap/.*.o +^test/etap/.deps/.* 
^test/etap/test_cfg_register ^test/etap/Makefile ^test/etap/Makefile.in -^test/etap/temp.* +^test/etap/temp..* +^test/etap/fixtures/* ^test/javascript/Makefile ^test/javascript/Makefile.in ^test/local.ini ^test/view_server/Makefile ^test/view_server/Makefile.in -^tmp/* +^tmp/.* ^utils/Makefile ^utils/Makefile.in ^var/Makefile ^var/Makefile.in +^Vagrantfile diff -Nru couchdb-1.2.0/m4/ac_check_curl.m4 couchdb-1.4.0~rc.1/m4/ac_check_curl.m4 --- couchdb-1.2.0/m4/ac_check_curl.m4 2012-03-29 17:05:31.000000000 -0400 +++ couchdb-1.4.0~rc.1/m4/ac_check_curl.m4 1969-12-31 19:00:00.000000000 -0500 @@ -1,69 +0,0 @@ -# =========================================================================== -# http://autoconf-archive.cryp.to/ac_check_curl.html -# =========================================================================== -# -# SYNOPSIS -# -# AC_CHECK_CURL(version, action-if, action-if-not) -# -# DESCRIPTION -# -# Defines CURL_LIBS, CURL_CFLAGS. See curl-config(1) man page. -# -# LAST MODIFICATION -# -# 2008-04-12 -# -# COPYLEFT -# -# Copyright (c) 2008 Akos Maroy -# -# Copying and distribution of this file, with or without modification, are -# permitted in any medium without royalty provided the copyright notice -# and this notice are preserved. - -AC_DEFUN([AC_CHECK_CURL], [ - succeeded=no - - if test -z "$CURL_CONFIG"; then - AC_PATH_PROG(CURL_CONFIG, curl-config, no) - fi - - if test "$CURL_CONFIG" = "no" ; then - echo "*** The curl-config script could not be found. Make sure it is" - echo "*** in your path, and that curl is properly installed." 
- echo "*** Or see http://curl.haxx.se/" - else - dnl curl-config --version returns "libcurl ", thus cut the number - CURL_VERSION=`$CURL_CONFIG --version | cut -d" " -f2` - AC_MSG_CHECKING(for curl >= $1) - VERSION_CHECK=`expr $CURL_VERSION \>\= $1` - if test "$VERSION_CHECK" = "1" ; then - AC_MSG_RESULT(yes) - succeeded=yes - - AC_MSG_CHECKING(CURL_CFLAGS) - CURL_CFLAGS=`$CURL_CONFIG --cflags` - AC_MSG_RESULT($CURL_CFLAGS) - - AC_MSG_CHECKING(CURL_LIBS) - CURL_LIBS=`$CURL_CONFIG --libs` - AC_MSG_RESULT($CURL_LIBS) - else - CURL_CFLAGS="" - CURL_LIBS="" - ## If we have a custom action on failure, don't print errors, but - ## do set a variable so people can do so. - ifelse([$3], ,echo "can't find curl >= $1",) - fi - - AC_SUBST(CURL_CFLAGS) - AC_SUBST(CURL_LIBS) - fi - - if test $succeeded = yes; then - ifelse([$2], , :, [$2]) - else - ifelse([$3], , AC_MSG_ERROR([Library requirements (curl) not met.]), [$3]) - fi -]) \ No newline at end of file diff -Nru couchdb-1.2.0/m4/ac_check_icu.m4 couchdb-1.4.0~rc.1/m4/ac_check_icu.m4 --- couchdb-1.2.0/m4/ac_check_icu.m4 2012-03-29 17:05:31.000000000 -0400 +++ couchdb-1.4.0~rc.1/m4/ac_check_icu.m4 1969-12-31 19:00:00.000000000 -0500 @@ -1,74 +0,0 @@ -# =========================================================================== -# http://autoconf-archive.cryp.to/ac_check_icu.html -# =========================================================================== -# -# SYNOPSIS -# -# AC_CHECK_ICU(version, action-if, action-if-not) -# -# DESCRIPTION -# -# Defines ICU_LIBS, ICU_CFLAGS, ICU_CXXFLAGS. See icu-config(1) man page. -# -# LAST MODIFICATION -# -# 2008-04-12 -# -# COPYLEFT -# -# Copyright (c) 2008 Akos Maroy -# -# Copying and distribution of this file, with or without modification, are -# permitted in any medium without royalty provided the copyright notice -# and this notice are preserved. 
- -AC_DEFUN([AC_CHECK_ICU], [ - succeeded=no - - if test -z "$ICU_CONFIG"; then - AC_PATH_PROG(ICU_CONFIG, icu-config, no) - fi - - if test "$ICU_CONFIG" = "no" ; then - echo "*** The icu-config script could not be found. Make sure it is" - echo "*** in your path, and that taglib is properly installed." - echo "*** Or see http://ibm.com/software/globalization/icu/" - else - ICU_VERSION=`$ICU_CONFIG --version` - AC_MSG_CHECKING(for ICU >= $1) - VERSION_CHECK=`expr $ICU_VERSION \>\= $1` - if test "$VERSION_CHECK" = "1" ; then - AC_MSG_RESULT(yes) - succeeded=yes - - AC_MSG_CHECKING(ICU_CFLAGS) - ICU_CFLAGS=`$ICU_CONFIG --cflags` - AC_MSG_RESULT($ICU_CFLAGS) - - AC_MSG_CHECKING(ICU_CXXFLAGS) - ICU_CXXFLAGS=`$ICU_CONFIG --cxxflags` - AC_MSG_RESULT($ICU_CXXFLAGS) - - AC_MSG_CHECKING(ICU_LIBS) - ICU_LIBS=`$ICU_CONFIG --ldflags` - AC_MSG_RESULT($ICU_LIBS) - else - ICU_CFLAGS="" - ICU_CXXFLAGS="" - ICU_LIBS="" - ## If we have a custom action on failure, don't print errors, but - ## do set a variable so people can do so. 
- ifelse([$3], ,echo "can't find ICU >= $1",) - fi - - AC_SUBST(ICU_CFLAGS) - AC_SUBST(ICU_CXXFLAGS) - AC_SUBST(ICU_LIBS) - fi - - if test $succeeded = yes; then - ifelse([$2], , :, [$2]) - else - ifelse([$3], , AC_MSG_ERROR([Library requirements (ICU) not met.]), [$3]) - fi -]) diff -Nru couchdb-1.2.0/m4/libtool.m4 couchdb-1.4.0~rc.1/m4/libtool.m4 --- couchdb-1.2.0/m4/libtool.m4 2012-03-29 17:05:31.000000000 -0400 +++ couchdb-1.4.0~rc.1/m4/libtool.m4 2013-08-23 10:57:22.000000000 -0400 @@ -168,10 +168,13 @@ dnl m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_CHECK_SHELL_FEATURES])dnl +m4_require([_LT_PATH_CONVERSION_FUNCTIONS])dnl m4_require([_LT_CMD_RELOAD])dnl m4_require([_LT_CHECK_MAGIC_METHOD])dnl +m4_require([_LT_CHECK_SHAREDLIB_FROM_LINKLIB])dnl m4_require([_LT_CMD_OLD_ARCHIVE])dnl m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl +m4_require([_LT_WITH_SYSROOT])dnl _LT_CONFIG_LIBTOOL_INIT([ # See if we are running on zsh, and set the options which allow our @@ -707,8 +710,8 @@ fi cfgfile="${ofile}T" - trap "$RM -f \"$cfgfile\"; exit 1" 1 2 15 - $RM -f "$cfgfile" + trap "$RM \"$cfgfile\"; exit 1" 1 2 15 + $RM "$cfgfile" cat <<_LT_EOF >> "$cfgfile" #! $SHELL @@ -748,15 +751,12 @@ # if finds mixed CR/LF and LF-only lines. Since sed operates in # text mode, it properly converts lines to CR/LF. This bash problem # is reportedly fixed, but why not run on old versions too? 
- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \ - || (rm -f "$cfgfile"; exit 1) + sed '$q' "$ltmain" >> "$cfgfile" \ + || (rm -f "$cfgfile"; exit 1) - _LT_PROG_XSI_SHELLFNS + _LT_PROG_REPLACE_SHELLFNS - sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \ - || (rm -f "$cfgfile"; exit 1) - - mv -f "$cfgfile" "$ofile" || + mv -f "$cfgfile" "$ofile" || (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") chmod +x "$ofile" ], @@ -902,7 +902,7 @@ echo "$lt_simple_compile_test_code" >conftest.$ac_ext eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err _lt_compiler_boilerplate=`cat conftest.err` -$RM -r conftest* +$RM conftest* ])# _LT_COMPILER_BOILERPLATE @@ -1073,30 +1073,41 @@ fi ]) -# _LT_SYS_MODULE_PATH_AIX -# ----------------------- +# _LT_SYS_MODULE_PATH_AIX([TAGNAME]) +# ---------------------------------- # Links a minimal program and checks the executable # for the system default hardcoded library path. In most cases, # this is /usr/lib:/lib, but when the MPI compilers are used # the location of the communication and MPI libs are included too. # If we don't find anything, use the default library path according # to the aix ld manual. +# Store the results from the different compilers for each TAGNAME. +# Allow to override them for all tags through lt_cv_aix_libpath. m4_defun([_LT_SYS_MODULE_PATH_AIX], [m4_require([_LT_DECL_SED])dnl -AC_LINK_IFELSE(AC_LANG_PROGRAM,[ -lt_aix_libpath_sed=' - /Import File Strings/,/^$/ { - /^0/ { - s/^0 *\(.*\)$/\1/ - p - } - }' -aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -# Check for a 64-bit object if we didn't find anything. 
-if test -z "$aix_libpath"; then - aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -fi],[]) -if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi +if test "${lt_cv_aix_libpath+set}" = set; then + aix_libpath=$lt_cv_aix_libpath +else + AC_CACHE_VAL([_LT_TAGVAR([lt_cv_aix_libpath_], [$1])], + [AC_LINK_IFELSE([AC_LANG_PROGRAM],[ + lt_aix_libpath_sed='[ + /Import File Strings/,/^$/ { + /^0/ { + s/^0 *\([^ ]*\) *$/\1/ + p + } + }]' + _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` + # Check for a 64-bit object if we didn't find anything. + if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then + _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` + fi],[]) + if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then + _LT_TAGVAR([lt_cv_aix_libpath_], [$1])="/usr/lib:/lib" + fi + ]) + aix_libpath=$_LT_TAGVAR([lt_cv_aix_libpath_], [$1]) +fi ])# _LT_SYS_MODULE_PATH_AIX @@ -1121,7 +1132,7 @@ AC_MSG_CHECKING([how to print strings]) # Test print first, because it will be a builtin if present. -if test "X`print -r -- -n 2>/dev/null`" = X-n && \ +if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \ test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then ECHO='print -r --' elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then @@ -1165,6 +1176,39 @@ ])# _LT_PROG_ECHO_BACKSLASH +# _LT_WITH_SYSROOT +# ---------------- +AC_DEFUN([_LT_WITH_SYSROOT], +[AC_MSG_CHECKING([for sysroot]) +AC_ARG_WITH([sysroot], +[ --with-sysroot[=DIR] Search for dependent libraries within DIR + (or the compiler's sysroot if not specified).], +[], [with_sysroot=no]) + +dnl lt_sysroot will always be passed unquoted. We quote it here +dnl in case the user passed a directory name. 
+lt_sysroot= +case ${with_sysroot} in #( + yes) + if test "$GCC" = yes; then + lt_sysroot=`$CC --print-sysroot 2>/dev/null` + fi + ;; #( + /*) + lt_sysroot=`echo "$with_sysroot" | sed -e "$sed_quote_subst"` + ;; #( + no|'') + ;; #( + *) + AC_MSG_RESULT([${with_sysroot}]) + AC_MSG_ERROR([The sysroot must be an absolute path.]) + ;; +esac + + AC_MSG_RESULT([${lt_sysroot:-no}]) +_LT_DECL([], [lt_sysroot], [0], [The root where to search for ]dnl +[dependent libraries, and in which our libraries should be installed.])]) + # _LT_ENABLE_LOCK # --------------- m4_defun([_LT_ENABLE_LOCK], @@ -1311,14 +1355,47 @@ ])# _LT_ENABLE_LOCK +# _LT_PROG_AR +# ----------- +m4_defun([_LT_PROG_AR], +[AC_CHECK_TOOLS(AR, [ar], false) +: ${AR=ar} +: ${AR_FLAGS=cru} +_LT_DECL([], [AR], [1], [The archiver]) +_LT_DECL([], [AR_FLAGS], [1], [Flags to create an archive]) + +AC_CACHE_CHECK([for archiver @FILE support], [lt_cv_ar_at_file], + [lt_cv_ar_at_file=no + AC_COMPILE_IFELSE([AC_LANG_PROGRAM], + [echo conftest.$ac_objext > conftest.lst + lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&AS_MESSAGE_LOG_FD' + AC_TRY_EVAL([lt_ar_try]) + if test "$ac_status" -eq 0; then + # Ensure the archiver fails upon bogus file names. 
+ rm -f conftest.$ac_objext libconftest.a + AC_TRY_EVAL([lt_ar_try]) + if test "$ac_status" -ne 0; then + lt_cv_ar_at_file=@ + fi + fi + rm -f conftest.* libconftest.a + ]) + ]) + +if test "x$lt_cv_ar_at_file" = xno; then + archiver_list_spec= +else + archiver_list_spec=$lt_cv_ar_at_file +fi +_LT_DECL([], [archiver_list_spec], [1], + [How to feed a file listing to the archiver]) +])# _LT_PROG_AR + + # _LT_CMD_OLD_ARCHIVE # ------------------- m4_defun([_LT_CMD_OLD_ARCHIVE], -[AC_CHECK_TOOL(AR, ar, false) -test -z "$AR" && AR=ar -test -z "$AR_FLAGS" && AR_FLAGS=cru -_LT_DECL([], [AR], [1], [The archiver]) -_LT_DECL([], [AR_FLAGS], [1]) +[_LT_PROG_AR AC_CHECK_TOOL(STRIP, strip, :) test -z "$STRIP" && STRIP=: @@ -1396,7 +1473,7 @@ $2=yes fi fi - $RM -r conftest* + $RM conftest* ]) if test x"[$]$2" = xyes; then @@ -1658,10 +1735,10 @@ /* When -fvisbility=hidden is used, assume the code has been annotated correspondingly for the symbols needed. */ #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) -void fnord () __attribute__((visibility("default"))); +int fnord () __attribute__((visibility("default"))); #endif -void fnord () { int i=42; } +int fnord () { return 42; } int main () { void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); @@ -1866,14 +1943,14 @@ fi fi chmod u+w . 2>&AS_MESSAGE_LOG_FD - $RM -r conftest* + $RM conftest* # SGI C++ compiler will create directory out/ii_files/ for # template instantiation test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files $RM out/* && rmdir out cd .. 
$RM -r conftest - $RM -r conftest* + $RM conftest* ]) _LT_TAGDECL([compiler_c_o], [lt_cv_prog_compiler_c_o], [1], [Does compiler simultaneously support -c and -o options?]) @@ -1893,7 +1970,7 @@ # do not overwrite the value of need_locks provided by the user AC_MSG_CHECKING([if we can lock with hard links]) hard_links=yes - $RM -r conftest* + $RM conftest* ln conftest.a conftest.b 2>/dev/null && hard_links=no touch conftest.a ln conftest.a conftest.b 2>&5 || hard_links=no @@ -2201,8 +2278,9 @@ need_version=no need_lib_prefix=no - case $GCC,$host_os in - yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*) + case $GCC,$cc_basename in + yes,*) + # gcc library_names_spec='$libname.dll.a' # DLL is installed to $(libdir)/../bin by postinstall_cmds postinstall_cmds='base_file=`basename \${file}`~ @@ -2235,13 +2313,71 @@ library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}' ;; esac + dynamic_linker='Win32 ld.exe' + ;; + + *,cl*) + # Native MSVC + libname_spec='$name' + soname_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}' + library_names_spec='${libname}.dll.lib' + + case $build_os in + mingw*) + sys_lib_search_path_spec= + lt_save_ifs=$IFS + IFS=';' + for lt_path in $LIB + do + IFS=$lt_save_ifs + # Let DOS variable expansion print the short 8.3 style file name. + lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` + sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" + done + IFS=$lt_save_ifs + # Convert to MSYS style. + sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([[a-zA-Z]]\\):| /\\1|g' -e 's|^ ||'` + ;; + cygwin*) + # Convert to unix form, then to dos form, then back to unix form + # but this time dos style (no spaces!) so that the unix form looks + # like /cygdrive/c/PROGRA~1:/cygdr... 
+ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` + sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` + sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` + ;; + *) + sys_lib_search_path_spec="$LIB" + if $ECHO "$sys_lib_search_path_spec" | [$GREP ';[c-zC-Z]:/' >/dev/null]; then + # It is most probably a Windows format PATH. + sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` + else + sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` + fi + # FIXME: find the short name or the path components, as spaces are + # common. (e.g. "Program Files" -> "PROGRA~1") + ;; + esac + + # DLL is installed to $(libdir)/../bin by postinstall_cmds + postinstall_cmds='base_file=`basename \${file}`~ + dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ + dldir=$destdir/`dirname \$dlpath`~ + test -d \$dldir || mkdir -p \$dldir~ + $install_prog $dir/$dlname \$dldir/$dlname' + postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ + dlpath=$dir/\$dldll~ + $RM \$dlpath' + shlibpath_overrides_runpath=yes + dynamic_linker='Win32 link.exe' ;; *) + # Assume MSVC wrapper library_names_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext} $libname.lib' + dynamic_linker='Win32 ld.exe' ;; esac - dynamic_linker='Win32 ld.exe' # FIXME: first we should search . 
and the directory the executable is in shlibpath_var=PATH ;; @@ -2251,7 +2387,7 @@ version_type=darwin need_lib_prefix=no need_version=no - library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext ${libname}${release}${versuffix}$shared_ext' + library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext' soname_spec='${libname}${release}${major}$shared_ext' shlibpath_overrides_runpath=yes shlibpath_var=DYLD_LIBRARY_PATH @@ -2285,7 +2421,14 @@ *) objformat=elf ;; esac fi - version_type=freebsd-$objformat + # Handle Gentoo/FreeBSD as it was Linux + case $host_vendor in + gentoo) + version_type=linux ;; + *) + version_type=freebsd-$objformat ;; + esac + case $version_type in freebsd-elf*) library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' @@ -2296,6 +2439,12 @@ library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix' need_version=yes ;; + linux) + library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' + soname_spec='${libname}${release}${shared_ext}$major' + need_lib_prefix=no + need_version=no + ;; esac shlibpath_var=LD_LIBRARY_PATH case $host_os in @@ -2945,6 +3094,11 @@ esac reload_cmds='$LD$reload_flag -o $output$reload_objs' case $host_os in + cygwin* | mingw* | pw32* | cegcc*) + if test "$GCC" != yes; then + reload_cmds=false + fi + ;; darwin*) if test "$GCC" = yes; then reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs' @@ -3163,6 +3317,21 @@ ;; esac ]) + +file_magic_glob= +want_nocaseglob=no +if test "$build" = "$host"; then + case $host_os in + mingw* | pw32*) + if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then + want_nocaseglob=yes + else + file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[[\1]]\/[[\1]]\/g;/g"` + fi + ;; + esac +fi + 
file_magic_cmd=$lt_cv_file_magic_cmd deplibs_check_method=$lt_cv_deplibs_check_method test -z "$deplibs_check_method" && deplibs_check_method=unknown @@ -3170,7 +3339,11 @@ _LT_DECL([], [deplibs_check_method], [1], [Method to check whether dependent libraries are shared objects]) _LT_DECL([], [file_magic_cmd], [1], - [Command to use when deplibs_check_method == "file_magic"]) + [Command to use when deplibs_check_method = "file_magic"]) +_LT_DECL([], [file_magic_glob], [1], + [How to find potential files when deplibs_check_method = "file_magic"]) +_LT_DECL([], [want_nocaseglob], [1], + [Find potential files using nocaseglob when deplibs_check_method = "file_magic"]) ])# _LT_CHECK_MAGIC_METHOD @@ -3263,7 +3436,7 @@ if $GREP 'External.*some_variable' conftest.out > /dev/null; then lt_cv_nm_interface="MS dumpbin" fi - rm -r -f conftest*]) + rm -f conftest*]) ])# LT_PATH_NM # Old names: @@ -3273,6 +3446,67 @@ dnl AC_DEFUN([AM_PROG_NM], []) dnl AC_DEFUN([AC_PROG_NM], []) +# _LT_CHECK_SHAREDLIB_FROM_LINKLIB +# -------------------------------- +# how to determine the name of the shared library +# associated with a specific link library. 
+# -- PORTME fill in with the dynamic library characteristics +m4_defun([_LT_CHECK_SHAREDLIB_FROM_LINKLIB], +[m4_require([_LT_DECL_EGREP]) +m4_require([_LT_DECL_OBJDUMP]) +m4_require([_LT_DECL_DLLTOOL]) +AC_CACHE_CHECK([how to associate runtime and link libraries], +lt_cv_sharedlib_from_linklib_cmd, +[lt_cv_sharedlib_from_linklib_cmd='unknown' + +case $host_os in +cygwin* | mingw* | pw32* | cegcc*) + # two different shell functions defined in ltmain.sh + # decide which to use based on capabilities of $DLLTOOL + case `$DLLTOOL --help 2>&1` in + *--identify-strict*) + lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib + ;; + *) + lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback + ;; + esac + ;; +*) + # fallback: assume linklib IS sharedlib + lt_cv_sharedlib_from_linklib_cmd="$ECHO" + ;; +esac +]) +sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd +test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO + +_LT_DECL([], [sharedlib_from_linklib_cmd], [1], + [Command to associate shared and link libraries]) +])# _LT_CHECK_SHAREDLIB_FROM_LINKLIB + + +# _LT_PATH_MANIFEST_TOOL +# ---------------------- +# locate the manifest tool +m4_defun([_LT_PATH_MANIFEST_TOOL], +[AC_CHECK_TOOL(MANIFEST_TOOL, mt, :) +test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt +AC_CACHE_CHECK([if $MANIFEST_TOOL is a manifest tool], [lt_cv_path_mainfest_tool], + [lt_cv_path_mainfest_tool=no + echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&AS_MESSAGE_LOG_FD + $MANIFEST_TOOL '-?' 
2>conftest.err > conftest.out + cat conftest.err >&AS_MESSAGE_LOG_FD + if $GREP 'Manifest Tool' conftest.out > /dev/null; then + lt_cv_path_mainfest_tool=yes + fi + rm -f conftest*]) +if test "x$lt_cv_path_mainfest_tool" != xyes; then + MANIFEST_TOOL=: +fi +_LT_DECL([], [MANIFEST_TOOL], [1], [Manifest tool])dnl +])# _LT_PATH_MANIFEST_TOOL + # LT_LIB_M # -------- @@ -3399,8 +3633,8 @@ lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" # Transform an extracted symbol line into symbol name and symbol address -lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([[^ ]]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p'" -lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([[^ ]]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \(lib[[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"lib\2\", (void *) \&\2},/p'" +lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([[^ ]]*\)[[ ]]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p'" +lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([[^ ]]*\)[[ ]]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \(lib[[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"lib\2\", (void *) \&\2},/p'" # Handle CRLF in mingw tool chain opt_cr= @@ -3436,11 +3670,12 @@ else lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[[ ]]\($symcode$symcode*\)[[ ]][[ ]]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" fi + lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'" # Check to see that the pipe works correctly. 
pipe_works=no - rm -r -f conftest* + rm -f conftest* cat > conftest.$ac_ext <<_LT_EOF #ifdef __cplusplus extern "C" { @@ -3469,6 +3704,18 @@ if $GREP ' nm_test_var$' "$nlist" >/dev/null; then if $GREP ' nm_test_func$' "$nlist" >/dev/null; then cat <<_LT_EOF > conftest.$ac_ext +/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */ +#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) +/* DATA imports from DLLs on WIN32 con't be const, because runtime + relocations are performed -- see ld's documentation on pseudo-relocs. */ +# define LT@&t@_DLSYM_CONST +#elif defined(__osf__) +/* This system does not cope well with relocations in const data. */ +# define LT@&t@_DLSYM_CONST +#else +# define LT@&t@_DLSYM_CONST const +#endif + #ifdef __cplusplus extern "C" { #endif @@ -3480,7 +3727,7 @@ cat <<_LT_EOF >> conftest.$ac_ext /* The mapping between symbol names and symbols. */ -const struct { +LT@&t@_DLSYM_CONST struct { const char *name; void *address; } @@ -3506,15 +3753,15 @@ _LT_EOF # Now try linking the two files. mv conftest.$ac_objext conftstm.$ac_objext - lt_save_LIBS="$LIBS" - lt_save_CFLAGS="$CFLAGS" + lt_globsym_save_LIBS=$LIBS + lt_globsym_save_CFLAGS=$CFLAGS LIBS="conftstm.$ac_objext" CFLAGS="$CFLAGS$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)" if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext}; then pipe_works=yes fi - LIBS="$lt_save_LIBS" - CFLAGS="$lt_save_CFLAGS" + LIBS=$lt_globsym_save_LIBS + CFLAGS=$lt_globsym_save_CFLAGS else echo "cannot find nm_test_func in $nlist" >&AS_MESSAGE_LOG_FD fi @@ -3547,6 +3794,13 @@ AC_MSG_RESULT(ok) fi +# Response file support. 
+if test "$lt_cv_nm_interface" = "MS dumpbin"; then + nm_file_list_spec='@' +elif $NM --help 2>/dev/null | grep '[[@]]FILE' >/dev/null; then + nm_file_list_spec='@' +fi + _LT_DECL([global_symbol_pipe], [lt_cv_sys_global_symbol_pipe], [1], [Take the output of nm and produce a listing of raw symbols and C names]) _LT_DECL([global_symbol_to_cdecl], [lt_cv_sys_global_symbol_to_cdecl], [1], @@ -3557,6 +3811,8 @@ _LT_DECL([global_symbol_to_c_name_address_lib_prefix], [lt_cv_sys_global_symbol_to_c_name_address_lib_prefix], [1], [Transform the output of nm in a C name address pair when lib prefix is needed]) +_LT_DECL([], [nm_file_list_spec], [1], + [Specify filename containing input files for $NM]) ]) # _LT_CMD_GLOBAL_SYMBOLS @@ -3568,7 +3824,6 @@ _LT_TAGVAR(lt_prog_compiler_pic, $1)= _LT_TAGVAR(lt_prog_compiler_static, $1)= -AC_MSG_CHECKING([for $compiler option to produce PIC]) m4_if([$1], [CXX], [ # C++ specific cases for pic, static, wl, etc. if test "$GXX" = yes; then @@ -3673,6 +3928,12 @@ ;; esac ;; + mingw* | cygwin* | os2* | pw32* | cegcc*) + # This hack is so that the source file can tell whether it is being + # built for inclusion in a dll (and should export symbols for example). 
+ m4_if([$1], [GCJ], [], + [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) + ;; dgux*) case $cc_basename in ec++*) @@ -4047,6 +4308,12 @@ _LT_TAGVAR(lt_prog_compiler_pic, $1)='--shared' _LT_TAGVAR(lt_prog_compiler_static, $1)='--static' ;; + nagfor*) + # NAG Fortran compiler + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,-Wl,,' + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC' + _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' + ;; pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) # Portland Group compilers (*not* the Pentium gcc compiler, # which looks to be a dead project) @@ -4166,9 +4433,11 @@ _LT_TAGVAR(lt_prog_compiler_pic, $1)="$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])" ;; esac -AC_MSG_RESULT([$_LT_TAGVAR(lt_prog_compiler_pic, $1)]) -_LT_TAGDECL([wl], [lt_prog_compiler_wl], [1], - [How to pass a linker flag through the compiler]) + +AC_CACHE_CHECK([for $compiler option to produce PIC], + [_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)], + [_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)=$_LT_TAGVAR(lt_prog_compiler_pic, $1)]) +_LT_TAGVAR(lt_prog_compiler_pic, $1)=$_LT_TAGVAR(lt_cv_prog_compiler_pic, $1) # # Check to make sure the PIC flag actually works. @@ -4187,6 +4456,8 @@ _LT_TAGDECL([pic_flag], [lt_prog_compiler_pic], [1], [Additional compiler flags for building library objects]) +_LT_TAGDECL([wl], [lt_prog_compiler_wl], [1], + [How to pass a linker flag through the compiler]) # # Check to make sure the static flag actually works. 
# @@ -4207,6 +4478,7 @@ m4_defun([_LT_LINKER_SHLIBS], [AC_REQUIRE([LT_PATH_LD])dnl AC_REQUIRE([LT_PATH_NM])dnl +m4_require([_LT_PATH_MANIFEST_TOOL])dnl m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_DECL_EGREP])dnl m4_require([_LT_DECL_SED])dnl @@ -4215,6 +4487,7 @@ AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries]) m4_if([$1], [CXX], [ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' + _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'] case $host_os in aix[[4-9]]*) # If we're using GNU nm, then we don't want the "-C" option. @@ -4229,15 +4502,20 @@ ;; pw32*) _LT_TAGVAR(export_symbols_cmds, $1)="$ltdll_cmds" - ;; + ;; cygwin* | mingw* | cegcc*) - _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;/^.*[[ ]]__nm__/s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols' - ;; + case $cc_basename in + cl*) ;; + *) + _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols' + _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'] + ;; + esac + ;; *) _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' - ;; + ;; esac - _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'] ], [ runpath_var= _LT_TAGVAR(allow_undefined_flag, $1)= @@ -4405,7 +4683,8 @@ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported _LT_TAGVAR(always_export_symbols, $1)=no 
_LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes - _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/'\'' | $SED -e '\''/^[[AITW]][[ ]]/s/.*[[ ]]//'\'' | sort | uniq > $export_symbols' + _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols' + _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'] if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' @@ -4453,7 +4732,7 @@ if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \ && test "$tmp_diet" = no then - tmp_addflag= + tmp_addflag=' $pic_flag' tmp_sharedflag='-shared' case $cc_basename,$host_cpu in pgcc*) # Portland Group C compiler @@ -4523,8 +4802,8 @@ _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' wlarc= else - _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' fi ;; @@ -4542,8 +4821,8 @@ _LT_EOF elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then - 
_LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi @@ -4589,8 +4868,8 @@ *) if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then - _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi @@ -4720,7 +4999,7 @@ _LT_TAGVAR(allow_undefined_flag, $1)='-berok' # Determine the default libpath from the value encoded in an # empty executable. - _LT_SYS_MODULE_PATH_AIX + _LT_SYS_MODULE_PATH_AIX([$1]) _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" else @@ -4731,7 +5010,7 @@ else # Determine the default libpath from the value encoded in an # empty executable. 
- _LT_SYS_MODULE_PATH_AIX + _LT_SYS_MODULE_PATH_AIX([$1]) _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" # Warning - without using the other run time loading flags, # -berok will link without error, but may produce a broken library. @@ -4775,20 +5054,63 @@ # Microsoft Visual C++. # hardcode_libdir_flag_spec is actually meaningless, as there is # no search path for DLLs. - _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' ' - _LT_TAGVAR(allow_undefined_flag, $1)=unsupported - # Tell ltmain to make .lib files, not .a files. - libext=lib - # Tell ltmain to make .dll files, not .so files. - shrext_cmds=".dll" - # FIXME: Setting linknames here is a bad hack. - _LT_TAGVAR(archive_cmds, $1)='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' - # The linker will automatically build a .lib file if we build a DLL. - _LT_TAGVAR(old_archive_from_new_cmds, $1)='true' - # FIXME: Should let the user specify the lib program. - _LT_TAGVAR(old_archive_cmds, $1)='lib -OUT:$oldlib$oldobjs$old_deplibs' - _LT_TAGVAR(fix_srcfile_path, $1)='`cygpath -w "$srcfile"`' - _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes + case $cc_basename in + cl*) + # Native MSVC + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' ' + _LT_TAGVAR(allow_undefined_flag, $1)=unsupported + _LT_TAGVAR(always_export_symbols, $1)=yes + _LT_TAGVAR(file_list_spec, $1)='@' + # Tell ltmain to make .lib files, not .a files. + libext=lib + # Tell ltmain to make .dll files, not .so files. + shrext_cmds=".dll" + # FIXME: Setting linknames here is a bad hack. 
+ _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' + _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then + sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; + else + sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; + fi~ + $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ + linknames=' + # The linker will not automatically build a static lib if we build a DLL. + # _LT_TAGVAR(old_archive_from_new_cmds, $1)='true' + _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes + _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1,DATA/'\'' | $SED -e '\''/^[[AITW]][[ ]]/s/.*[[ ]]//'\'' | sort | uniq > $export_symbols' + # Don't use ranlib + _LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib' + _LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~ + lt_tool_outputfile="@TOOL_OUTPUT@"~ + case $lt_outputfile in + *.exe|*.EXE) ;; + *) + lt_outputfile="$lt_outputfile.exe" + lt_tool_outputfile="$lt_tool_outputfile.exe" + ;; + esac~ + if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then + $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; + $RM "$lt_outputfile.manifest"; + fi' + ;; + *) + # Assume MSVC wrapper + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' ' + _LT_TAGVAR(allow_undefined_flag, $1)=unsupported + # Tell ltmain to make .lib files, not .a files. + libext=lib + # Tell ltmain to make .dll files, not .so files. + shrext_cmds=".dll" + # FIXME: Setting linknames here is a bad hack. 
+ _LT_TAGVAR(archive_cmds, $1)='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' + # The linker will automatically build a .lib file if we build a DLL. + _LT_TAGVAR(old_archive_from_new_cmds, $1)='true' + # FIXME: Should let the user specify the lib program. + _LT_TAGVAR(old_archive_cmds, $1)='lib -OUT:$oldlib$oldobjs$old_deplibs' + _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes + ;; + esac ;; darwin* | rhapsody*) @@ -4826,7 +5148,7 @@ # FreeBSD 3 and greater uses gcc -shared to do shared libraries. freebsd* | dragonfly*) - _LT_TAGVAR(archive_cmds, $1)='$CC -shared -o $lib $libobjs $deplibs $compiler_flags' + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no @@ -4834,7 +5156,7 @@ hpux9*) if test "$GCC" = yes; then - _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' else _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' fi @@ -4850,7 +5172,7 @@ hpux10*) if test "$GCC" = yes && test "$with_gnu_ld" = no; then - _LT_TAGVAR(archive_cmds, $1)='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir 
-o $lib $libobjs $deplibs $compiler_flags' else _LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' fi @@ -4874,10 +5196,10 @@ _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' ;; ia64*) - _LT_TAGVAR(archive_cmds, $1)='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' ;; *) - _LT_TAGVAR(archive_cmds, $1)='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ;; esac else @@ -4924,16 +5246,31 @@ irix5* | irix6* | nonstopux*) if test "$GCC" = yes; then - _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' # Try to use the -exported_symbol ld option, if it does not # work, assume that -exports_file does not work either and # implicitly export all symbols. 
- save_LDFLAGS="$LDFLAGS" - LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" - AC_LINK_IFELSE(int foo(void) {}, - _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' - ) - LDFLAGS="$save_LDFLAGS" + # This should be the same for all languages, so no per-tag cache variable. + AC_CACHE_CHECK([whether the $host_os linker accepts -exported_symbol], + [lt_cv_irix_exported_symbol], + [save_LDFLAGS="$LDFLAGS" + LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" + AC_LINK_IFELSE( + [AC_LANG_SOURCE( + [AC_LANG_CASE([C], [[int foo (void) { return 0; }]], + [C++], [[int foo (void) { return 0; }]], + [Fortran 77], [[ + subroutine foo + end]], + [Fortran], [[ + subroutine foo + end]])])], + [lt_cv_irix_exported_symbol=yes], + [lt_cv_irix_exported_symbol=no]) + LDFLAGS="$save_LDFLAGS"]) + if test "$lt_cv_irix_exported_symbol" = yes; then + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' + fi else _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' @@ -5018,7 +5355,7 @@ osf4* | osf5*) # as osf3* with the 
addition of -msym flag if test "$GCC" = yes; then _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*' - _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' else _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*' @@ -5037,9 +5374,9 @@ _LT_TAGVAR(no_undefined_flag, $1)=' -z defs' if test "$GCC" = yes; then wlarc='${wl}' - _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ - $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' + $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' else case `$CC -V 2>&1` in *"Compilers 5.0"*) @@ -5217,7 +5554,7 @@ # to ld, don't add -lc before -lgcc. 
AC_CACHE_CHECK([whether -lc should be explicitly linked in], [lt_cv_]_LT_TAGVAR(archive_cmds_need_lc, $1), - [$RM -r conftest* + [$RM conftest* echo "$lt_simple_compile_test_code" > conftest.$ac_ext if AC_TRY_EVAL(ac_compile) 2>conftest.err; then @@ -5244,7 +5581,7 @@ else cat conftest.err 1>&5 fi - $RM -r conftest* + $RM conftest* ]) _LT_TAGVAR(archive_cmds_need_lc, $1)=$lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1) ;; @@ -5311,8 +5648,6 @@ to runtime path list]) _LT_TAGDECL([], [link_all_deplibs], [0], [Whether libtool must link a program against all its dependency libraries]) -_LT_TAGDECL([], [fix_srcfile_path], [1], - [Fix the shell variable $srcfile for the compiler]) _LT_TAGDECL([], [always_export_symbols], [0], [Set to "yes" if exported symbols are required]) _LT_TAGDECL([], [export_symbols_cmds], [2], @@ -5323,6 +5658,8 @@ [Symbols that must always be exported]) _LT_TAGDECL([], [prelink_cmds], [2], [Commands necessary for linking programs (against libraries) with templates]) +_LT_TAGDECL([], [postlink_cmds], [2], + [Commands necessary for finishing linking programs]) _LT_TAGDECL([], [file_list_spec], [1], [Specify filename containing input files]) dnl FIXME: Not yet implemented @@ -5424,6 +5761,7 @@ m4_defun([_LT_LANG_CXX_CONFIG], [m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_DECL_EGREP])dnl +m4_require([_LT_PATH_MANIFEST_TOOL])dnl if test -n "$CXX" && ( test "X$CXX" != "Xno" && ( (test "X$CXX" = "Xg++" && `g++ -v >/dev/null 2>&1` ) || (test "X$CXX" != "Xg++"))) ; then @@ -5485,6 +5823,7 @@ # Allow CC to be a program name with arguments. lt_save_CC=$CC + lt_save_CFLAGS=$CFLAGS lt_save_LD=$LD lt_save_GCC=$GCC GCC=$GXX @@ -5502,6 +5841,7 @@ fi test -z "${LDCXX+set}" || LD=$LDCXX CC=${CXX-"c++"} + CFLAGS=$CXXFLAGS compiler=$CC _LT_TAGVAR(compiler, $1)=$CC _LT_CC_BASENAME([$compiler]) @@ -5523,8 +5863,8 @@ # Check if GNU C++ uses GNU ld as the underlying linker, since the # archiving commands below assume that GNU ld is being used. 
if test "$with_gnu_ld" = yes; then - _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' - _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + _LT_TAGVAR(archive_cmds, $1)='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' @@ -5665,7 +6005,7 @@ _LT_TAGVAR(allow_undefined_flag, $1)='-berok' # Determine the default libpath from the value encoded in an empty # executable. - _LT_SYS_MODULE_PATH_AIX + _LT_SYS_MODULE_PATH_AIX([$1]) _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" @@ -5677,7 +6017,7 @@ else # Determine the default libpath from the value encoded in an # empty executable. - _LT_SYS_MODULE_PATH_AIX + _LT_SYS_MODULE_PATH_AIX([$1]) _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" # Warning - without using the other run time loading flags, # -berok will link without error, but may produce a broken library. 
@@ -5719,29 +6059,75 @@ ;; cygwin* | mingw* | pw32* | cegcc*) - # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless, - # as there is no search path for DLLs. - _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' - _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-all-symbols' - _LT_TAGVAR(allow_undefined_flag, $1)=unsupported - _LT_TAGVAR(always_export_symbols, $1)=no - _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes - - if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then - _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' - # If the export-symbols file already is a .def file (1st line - # is EXPORTS), use it as is; otherwise, prepend... - _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then - cp $export_symbols $output_objdir/$soname.def; - else - echo EXPORTS > $output_objdir/$soname.def; - cat $export_symbols >> $output_objdir/$soname.def; - fi~ - $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' - else - _LT_TAGVAR(ld_shlibs, $1)=no - fi - ;; + case $GXX,$cc_basename in + ,cl* | no,cl*) + # Native MSVC + # hardcode_libdir_flag_spec is actually meaningless, as there is + # no search path for DLLs. + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' ' + _LT_TAGVAR(allow_undefined_flag, $1)=unsupported + _LT_TAGVAR(always_export_symbols, $1)=yes + _LT_TAGVAR(file_list_spec, $1)='@' + # Tell ltmain to make .lib files, not .a files. + libext=lib + # Tell ltmain to make .dll files, not .so files. + shrext_cmds=".dll" + # FIXME: Setting linknames here is a bad hack. 
+ _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' + _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then + $SED -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; + else + $SED -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; + fi~ + $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ + linknames=' + # The linker will not automatically build a static lib if we build a DLL. + # _LT_TAGVAR(old_archive_from_new_cmds, $1)='true' + _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes + # Don't use ranlib + _LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib' + _LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~ + lt_tool_outputfile="@TOOL_OUTPUT@"~ + case $lt_outputfile in + *.exe|*.EXE) ;; + *) + lt_outputfile="$lt_outputfile.exe" + lt_tool_outputfile="$lt_tool_outputfile.exe" + ;; + esac~ + func_to_tool_file "$lt_outputfile"~ + if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then + $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; + $RM "$lt_outputfile.manifest"; + fi' + ;; + *) + # g++ + # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless, + # as there is no search path for DLLs. 
+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' + _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-all-symbols' + _LT_TAGVAR(allow_undefined_flag, $1)=unsupported + _LT_TAGVAR(always_export_symbols, $1)=no + _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes + + if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then + _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' + # If the export-symbols file already is a .def file (1st line + # is EXPORTS), use it as is; otherwise, prepend... + _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then + cp $export_symbols $output_objdir/$soname.def; + else + echo EXPORTS > $output_objdir/$soname.def; + cat $export_symbols >> $output_objdir/$soname.def; + fi~ + $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' + else + _LT_TAGVAR(ld_shlibs, $1)=no + fi + ;; + esac + ;; darwin* | rhapsody*) _LT_DARWIN_LINKER_FEATURES($1) ;; @@ -5816,7 +6202,7 @@ ;; *) if test "$GXX" = yes; then - _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -nostdlib -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -nostdlib $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' else # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no @@ -5887,10 +6273,10 @@ _LT_TAGVAR(archive_cmds, 
$1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; ia64*) - _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' + _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; *) - _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' + _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; esac fi @@ -5931,9 +6317,9 @@ *) if test "$GXX" = yes; then if test "$with_gnu_ld" = no; then - _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' else - _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib' + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all 
"${wl}-set_version ${wl}$verstring"` -o $lib' fi fi _LT_TAGVAR(link_all_deplibs, $1)=yes @@ -6211,7 +6597,7 @@ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ;; *) - _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ;; esac @@ -6298,9 +6684,9 @@ if test "$GXX" = yes && test "$with_gnu_ld" = no; then _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-z ${wl}defs' if $CC --version | $GREP -v '^2\.7' > /dev/null; then - _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ - $CC -shared -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' + $CC -shared $pic_flag -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs 
$postdep_objects $compiler_flags~$RM $lib.exp' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when @@ -6429,6 +6815,7 @@ fi # test -n "$compiler" CC=$lt_save_CC + CFLAGS=$lt_save_CFLAGS LDCXX=$LD LD=$lt_save_LD GCC=$lt_save_GCC @@ -6443,6 +6830,29 @@ ])# _LT_LANG_CXX_CONFIG +# _LT_FUNC_STRIPNAME_CNF +# ---------------------- +# func_stripname_cnf prefix suffix name +# strip PREFIX and SUFFIX off of NAME. +# PREFIX and SUFFIX must not contain globbing or regex special +# characters, hashes, percent signs, but SUFFIX may contain a leading +# dot (in which case that matches only a dot). +# +# This function is identical to the (non-XSI) version of func_stripname, +# except this one can be used by m4 code that may be executed by configure, +# rather than the libtool script. +m4_defun([_LT_FUNC_STRIPNAME_CNF],[dnl +AC_REQUIRE([_LT_DECL_SED]) +AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH]) +func_stripname_cnf () +{ + case ${2} in + .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; + *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; + esac +} # func_stripname_cnf +])# _LT_FUNC_STRIPNAME_CNF + # _LT_SYS_HIDDEN_LIBDEPS([TAGNAME]) # --------------------------------- # Figure out "hidden" library dependencies from verbose @@ -6451,6 +6861,7 @@ # objects, libraries and library flags. m4_defun([_LT_SYS_HIDDEN_LIBDEPS], [m4_require([_LT_FILEUTILS_DEFAULTS])dnl +AC_REQUIRE([_LT_FUNC_STRIPNAME_CNF])dnl # Dependencies to place before and after the object being linked: _LT_TAGVAR(predep_objects, $1)= _LT_TAGVAR(postdep_objects, $1)= @@ -6501,6 +6912,13 @@ }; _LT_EOF ]) + +_lt_libdeps_save_CFLAGS=$CFLAGS +case "$CC $CFLAGS " in #( +*\ -flto*\ *) CFLAGS="$CFLAGS -fno-lto" ;; +*\ -fwhopr*\ *) CFLAGS="$CFLAGS -fno-whopr" ;; +esac + dnl Parse the compiler output and extract the necessary dnl objects, libraries and library flags. 
if AC_TRY_EVAL(ac_compile); then @@ -6512,7 +6930,7 @@ pre_test_object_deps_done=no for p in `eval "$output_verbose_link_cmd"`; do - case $p in + case ${prev}${p} in -L* | -R* | -l*) # Some compilers place space between "-{L,R}" and the path. @@ -6521,13 +6939,22 @@ test $p = "-R"; then prev=$p continue - else - prev= fi + # Expand the sysroot to ease extracting the directories later. + if test -z "$prev"; then + case $p in + -L*) func_stripname_cnf '-L' '' "$p"; prev=-L; p=$func_stripname_result ;; + -R*) func_stripname_cnf '-R' '' "$p"; prev=-R; p=$func_stripname_result ;; + -l*) func_stripname_cnf '-l' '' "$p"; prev=-l; p=$func_stripname_result ;; + esac + fi + case $p in + =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;; + esac if test "$pre_test_object_deps_done" = no; then - case $p in - -L* | -R*) + case ${prev} in + -L | -R) # Internal compiler library paths should come after those # provided the user. The postdeps already come after the # user supplied libs so there is no need to process them. @@ -6547,8 +6974,10 @@ _LT_TAGVAR(postdeps, $1)="${_LT_TAGVAR(postdeps, $1)} ${prev}${p}" fi fi + prev= ;; + *.lto.$objext) ;; # Ignore GCC LTO objects *.$objext) # This assumes that the test object file only shows up # once in the compiler output. @@ -6584,6 +7013,7 @@ fi $RM -f confest.$objext +CFLAGS=$_lt_libdeps_save_CFLAGS # PORTME: override above test on systems where it is broken m4_if([$1], [CXX], @@ -6733,7 +7163,9 @@ # Allow CC to be a program name with arguments. lt_save_CC="$CC" lt_save_GCC=$GCC + lt_save_CFLAGS=$CFLAGS CC=${F77-"f77"} + CFLAGS=$FFLAGS compiler=$CC _LT_TAGVAR(compiler, $1)=$CC _LT_CC_BASENAME([$compiler]) @@ -6787,6 +7219,7 @@ GCC=$lt_save_GCC CC="$lt_save_CC" + CFLAGS="$lt_save_CFLAGS" fi # test "$_lt_disable_F77" != yes AC_LANG_POP @@ -6863,7 +7296,9 @@ # Allow CC to be a program name with arguments. 
lt_save_CC="$CC" lt_save_GCC=$GCC + lt_save_CFLAGS=$CFLAGS CC=${FC-"f95"} + CFLAGS=$FCFLAGS compiler=$CC GCC=$ac_cv_fc_compiler_gnu @@ -6919,7 +7354,8 @@ fi # test -n "$compiler" GCC=$lt_save_GCC - CC="$lt_save_CC" + CC=$lt_save_CC + CFLAGS=$lt_save_CFLAGS fi # test "$_lt_disable_FC" != yes AC_LANG_POP @@ -6956,10 +7392,12 @@ _LT_LINKER_BOILERPLATE # Allow CC to be a program name with arguments. -lt_save_CC="$CC" +lt_save_CC=$CC +lt_save_CFLAGS=$CFLAGS lt_save_GCC=$GCC GCC=yes CC=${GCJ-"gcj"} +CFLAGS=$GCJFLAGS compiler=$CC _LT_TAGVAR(compiler, $1)=$CC _LT_TAGVAR(LD, $1)="$LD" @@ -6990,7 +7428,8 @@ AC_LANG_RESTORE GCC=$lt_save_GCC -CC="$lt_save_CC" +CC=$lt_save_CC +CFLAGS=$lt_save_CFLAGS ])# _LT_LANG_GCJ_CONFIG @@ -7025,9 +7464,11 @@ # Allow CC to be a program name with arguments. lt_save_CC="$CC" +lt_save_CFLAGS=$CFLAGS lt_save_GCC=$GCC GCC= CC=${RC-"windres"} +CFLAGS= compiler=$CC _LT_TAGVAR(compiler, $1)=$CC _LT_CC_BASENAME([$compiler]) @@ -7040,7 +7481,8 @@ GCC=$lt_save_GCC AC_LANG_RESTORE -CC="$lt_save_CC" +CC=$lt_save_CC +CFLAGS=$lt_save_CFLAGS ])# _LT_LANG_RC_CONFIG @@ -7099,6 +7541,15 @@ AC_SUBST([OBJDUMP]) ]) +# _LT_DECL_DLLTOOL +# ---------------- +# Ensure DLLTOOL variable is set. +m4_defun([_LT_DECL_DLLTOOL], +[AC_CHECK_TOOL(DLLTOOL, dlltool, false) +test -z "$DLLTOOL" && DLLTOOL=dlltool +_LT_DECL([], [DLLTOOL], [1], [DLL creation program]) +AC_SUBST([DLLTOOL]) +]) # _LT_DECL_SED # ------------ @@ -7192,8 +7643,8 @@ # Try some XSI features xsi_shell=no ( _lt_dummy="a/b/c" - test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \ - = c,a/b,, \ + test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \ + = c,a/b,b/c, \ && eval 'test $(( 1 + 1 )) -eq 2 \ && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ && xsi_shell=yes @@ -7232,206 +7683,162 @@ ])# _LT_CHECK_SHELL_FEATURES -# _LT_PROG_XSI_SHELLFNS -# --------------------- -# Bourne and XSI compatible variants of some useful shell functions. 
-m4_defun([_LT_PROG_XSI_SHELLFNS], -[case $xsi_shell in - yes) - cat << \_LT_EOF >> "$cfgfile" - -# func_dirname file append nondir_replacement -# Compute the dirname of FILE. If nonempty, add APPEND to the result, -# otherwise set result to NONDIR_REPLACEMENT. -func_dirname () -{ - case ${1} in - */*) func_dirname_result="${1%/*}${2}" ;; - * ) func_dirname_result="${3}" ;; - esac -} - -# func_basename file -func_basename () -{ - func_basename_result="${1##*/}" -} - -# func_dirname_and_basename file append nondir_replacement -# perform func_basename and func_dirname in a single function -# call: -# dirname: Compute the dirname of FILE. If nonempty, -# add APPEND to the result, otherwise set result -# to NONDIR_REPLACEMENT. -# value returned in "$func_dirname_result" -# basename: Compute filename of FILE. -# value retuned in "$func_basename_result" -# Implementation must be kept synchronized with func_dirname -# and func_basename. For efficiency, we do not delegate to -# those functions but instead duplicate the functionality here. -func_dirname_and_basename () -{ - case ${1} in - */*) func_dirname_result="${1%/*}${2}" ;; - * ) func_dirname_result="${3}" ;; - esac - func_basename_result="${1##*/}" -} - -# func_stripname prefix suffix name -# strip PREFIX and SUFFIX off of NAME. -# PREFIX and SUFFIX must not contain globbing or regex special -# characters, hashes, percent signs, but SUFFIX may contain a leading -# dot (in which case that matches only a dot). -func_stripname () -{ - # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are - # positional parameters, so assign one to ordinary parameter first. 
- func_stripname_result=${3} - func_stripname_result=${func_stripname_result#"${1}"} - func_stripname_result=${func_stripname_result%"${2}"} -} - -# func_opt_split -func_opt_split () -{ - func_opt_split_opt=${1%%=*} - func_opt_split_arg=${1#*=} -} - -# func_lo2o object -func_lo2o () -{ - case ${1} in - *.lo) func_lo2o_result=${1%.lo}.${objext} ;; - *) func_lo2o_result=${1} ;; - esac -} - -# func_xform libobj-or-source -func_xform () -{ - func_xform_result=${1%.*}.lo -} - -# func_arith arithmetic-term... -func_arith () -{ - func_arith_result=$(( $[*] )) -} - -# func_len string -# STRING may not start with a hyphen. -func_len () -{ - func_len_result=${#1} -} - -_LT_EOF - ;; - *) # Bourne compatible functions. - cat << \_LT_EOF >> "$cfgfile" - -# func_dirname file append nondir_replacement -# Compute the dirname of FILE. If nonempty, add APPEND to the result, -# otherwise set result to NONDIR_REPLACEMENT. -func_dirname () -{ - # Extract subdirectory from the argument. - func_dirname_result=`$ECHO "${1}" | $SED "$dirname"` - if test "X$func_dirname_result" = "X${1}"; then - func_dirname_result="${3}" - else - func_dirname_result="$func_dirname_result${2}" - fi -} - -# func_basename file -func_basename () -{ - func_basename_result=`$ECHO "${1}" | $SED "$basename"` -} +# _LT_PROG_FUNCTION_REPLACE (FUNCNAME, REPLACEMENT-BODY) +# ------------------------------------------------------ +# In `$cfgfile', look for function FUNCNAME delimited by `^FUNCNAME ()$' and +# '^} FUNCNAME ', and replace its body with REPLACEMENT-BODY. +m4_defun([_LT_PROG_FUNCTION_REPLACE], +[dnl { +sed -e '/^$1 ()$/,/^} # $1 /c\ +$1 ()\ +{\ +m4_bpatsubsts([$2], [$], [\\], [^\([ ]\)], [\\\1]) +} # Extended-shell $1 implementation' "$cfgfile" > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") +test 0 -eq $? 
|| _lt_function_replace_fail=: +]) -dnl func_dirname_and_basename -dnl A portable version of this function is already defined in general.m4sh -dnl so there is no need for it here. -# func_stripname prefix suffix name -# strip PREFIX and SUFFIX off of NAME. -# PREFIX and SUFFIX must not contain globbing or regex special -# characters, hashes, percent signs, but SUFFIX may contain a leading -# dot (in which case that matches only a dot). -# func_strip_suffix prefix name -func_stripname () -{ - case ${2} in - .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; - *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; - esac -} +# _LT_PROG_REPLACE_SHELLFNS +# ------------------------- +# Replace existing portable implementations of several shell functions with +# equivalent extended shell implementations where those features are available.. +m4_defun([_LT_PROG_REPLACE_SHELLFNS], +[if test x"$xsi_shell" = xyes; then + _LT_PROG_FUNCTION_REPLACE([func_dirname], [dnl + case ${1} in + */*) func_dirname_result="${1%/*}${2}" ;; + * ) func_dirname_result="${3}" ;; + esac]) + + _LT_PROG_FUNCTION_REPLACE([func_basename], [dnl + func_basename_result="${1##*/}"]) + + _LT_PROG_FUNCTION_REPLACE([func_dirname_and_basename], [dnl + case ${1} in + */*) func_dirname_result="${1%/*}${2}" ;; + * ) func_dirname_result="${3}" ;; + esac + func_basename_result="${1##*/}"]) -# sed scripts: -my_sed_long_opt='1s/^\(-[[^=]]*\)=.*/\1/;q' -my_sed_long_arg='1s/^-[[^=]]*=//' + _LT_PROG_FUNCTION_REPLACE([func_stripname], [dnl + # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are + # positional parameters, so assign one to ordinary parameter first. 
+ func_stripname_result=${3} + func_stripname_result=${func_stripname_result#"${1}"} + func_stripname_result=${func_stripname_result%"${2}"}]) + + _LT_PROG_FUNCTION_REPLACE([func_split_long_opt], [dnl + func_split_long_opt_name=${1%%=*} + func_split_long_opt_arg=${1#*=}]) + + _LT_PROG_FUNCTION_REPLACE([func_split_short_opt], [dnl + func_split_short_opt_arg=${1#??} + func_split_short_opt_name=${1%"$func_split_short_opt_arg"}]) + + _LT_PROG_FUNCTION_REPLACE([func_lo2o], [dnl + case ${1} in + *.lo) func_lo2o_result=${1%.lo}.${objext} ;; + *) func_lo2o_result=${1} ;; + esac]) -# func_opt_split -func_opt_split () -{ - func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"` - func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"` -} + _LT_PROG_FUNCTION_REPLACE([func_xform], [ func_xform_result=${1%.*}.lo]) -# func_lo2o object -func_lo2o () -{ - func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"` -} + _LT_PROG_FUNCTION_REPLACE([func_arith], [ func_arith_result=$(( $[*] ))]) -# func_xform libobj-or-source -func_xform () -{ - func_xform_result=`$ECHO "${1}" | $SED 's/\.[[^.]]*$/.lo/'` -} + _LT_PROG_FUNCTION_REPLACE([func_len], [ func_len_result=${#1}]) +fi -# func_arith arithmetic-term... -func_arith () -{ - func_arith_result=`expr "$[@]"` -} +if test x"$lt_shell_append" = xyes; then + _LT_PROG_FUNCTION_REPLACE([func_append], [ eval "${1}+=\\${2}"]) -# func_len string -# STRING may not start with a hyphen. 
-func_len () -{ - func_len_result=`expr "$[1]" : ".*" 2>/dev/null || echo $max_cmd_len` -} + _LT_PROG_FUNCTION_REPLACE([func_append_quoted], [dnl + func_quote_for_eval "${2}" +dnl m4 expansion turns \\\\ into \\, and then the shell eval turns that into \ + eval "${1}+=\\\\ \\$func_quote_for_eval_result"]) + + # Save a `func_append' function call where possible by direct use of '+=' + sed -e 's%func_append \([[a-zA-Z_]]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") + test 0 -eq $? || _lt_function_replace_fail=: +else + # Save a `func_append' function call even when '+=' is not available + sed -e 's%func_append \([[a-zA-Z_]]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \ + && mv -f "$cfgfile.tmp" "$cfgfile" \ + || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") + test 0 -eq $? || _lt_function_replace_fail=: +fi -_LT_EOF -esac +if test x"$_lt_function_replace_fail" = x":"; then + AC_MSG_WARN([Unable to substitute extended shell functions in $ofile]) +fi +]) -case $lt_shell_append in - yes) - cat << \_LT_EOF >> "$cfgfile" - -# func_append var value -# Append VALUE to the end of shell variable VAR. -func_append () -{ - eval "$[1]+=\$[2]" -} -_LT_EOF +# _LT_PATH_CONVERSION_FUNCTIONS +# ----------------------------- +# Determine which file name conversion functions should be used by +# func_to_host_file (and, implicitly, by func_to_host_path). These are needed +# for certain cross-compile configurations and native mingw. 
+m4_defun([_LT_PATH_CONVERSION_FUNCTIONS], +[AC_REQUIRE([AC_CANONICAL_HOST])dnl +AC_REQUIRE([AC_CANONICAL_BUILD])dnl +AC_MSG_CHECKING([how to convert $build file names to $host format]) +AC_CACHE_VAL(lt_cv_to_host_file_cmd, +[case $host in + *-*-mingw* ) + case $build in + *-*-mingw* ) # actually msys + lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32 + ;; + *-*-cygwin* ) + lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32 + ;; + * ) # otherwise, assume *nix + lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32 + ;; + esac ;; - *) - cat << \_LT_EOF >> "$cfgfile" - -# func_append var value -# Append VALUE to the end of shell variable VAR. -func_append () -{ - eval "$[1]=\$$[1]\$[2]" -} - -_LT_EOF + *-*-cygwin* ) + case $build in + *-*-mingw* ) # actually msys + lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin + ;; + *-*-cygwin* ) + lt_cv_to_host_file_cmd=func_convert_file_noop + ;; + * ) # otherwise, assume *nix + lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin + ;; + esac ;; - esac + * ) # unhandled hosts (and "normal" native builds) + lt_cv_to_host_file_cmd=func_convert_file_noop + ;; +esac +]) +to_host_file_cmd=$lt_cv_to_host_file_cmd +AC_MSG_RESULT([$lt_cv_to_host_file_cmd]) +_LT_DECL([to_host_file_cmd], [lt_cv_to_host_file_cmd], + [0], [convert $build file names to $host format])dnl + +AC_MSG_CHECKING([how to convert $build file names to toolchain format]) +AC_CACHE_VAL(lt_cv_to_tool_file_cmd, +[#assume ordinary cross tools, or native build. 
+lt_cv_to_tool_file_cmd=func_convert_file_noop +case $host in + *-*-mingw* ) + case $build in + *-*-mingw* ) # actually msys + lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32 + ;; + esac + ;; +esac ]) +to_tool_file_cmd=$lt_cv_to_tool_file_cmd +AC_MSG_RESULT([$lt_cv_to_tool_file_cmd]) +_LT_DECL([to_tool_file_cmd], [lt_cv_to_tool_file_cmd], + [0], [convert $build files to toolchain format])dnl +])# _LT_PATH_CONVERSION_FUNCTIONS diff -Nru couchdb-1.2.0/m4/ltversion.m4 couchdb-1.4.0~rc.1/m4/ltversion.m4 --- couchdb-1.2.0/m4/ltversion.m4 2012-03-29 17:05:31.000000000 -0400 +++ couchdb-1.4.0~rc.1/m4/ltversion.m4 2013-08-23 10:57:22.000000000 -0400 @@ -7,17 +7,17 @@ # unlimited permission to copy and/or distribute it, with or without # modifications, as long as this notice is preserved. -# Generated from ltversion.in. +# @configure_input@ -# serial 3175 ltversion.m4 +# serial 3293 ltversion.m4 # This file is part of GNU Libtool -m4_define([LT_PACKAGE_VERSION], [2.2.10]) -m4_define([LT_PACKAGE_REVISION], [1.3175]) +m4_define([LT_PACKAGE_VERSION], [2.4]) +m4_define([LT_PACKAGE_REVISION], [1.3293]) AC_DEFUN([LTVERSION_VERSION], -[macro_version='2.2.10' -macro_revision='1.3175' +[macro_version='2.4' +macro_revision='1.3293' _LT_DECL(, macro_version, 0, [Which release of libtool.m4 was used?]) _LT_DECL(, macro_revision, 0) ]) diff -Nru couchdb-1.2.0/m4/pkg.m4 couchdb-1.4.0~rc.1/m4/pkg.m4 --- couchdb-1.2.0/m4/pkg.m4 2012-03-29 17:05:31.000000000 -0400 +++ couchdb-1.4.0~rc.1/m4/pkg.m4 1969-12-31 19:00:00.000000000 -0500 @@ -1,157 +0,0 @@ -# pkg.m4 - Macros to locate and utilise pkg-config. -*- Autoconf -*- -# serial 1 (pkg-config-0.24) -# -# Copyright © 2004 Scott James Remnant . -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. 
-# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. -# -# As a special exception to the GNU General Public License, if you -# distribute this file as part of a program that contains a -# configuration script generated by Autoconf, you may include it under -# the same distribution terms that you use for the rest of that program. - -# PKG_PROG_PKG_CONFIG([MIN-VERSION]) -# ---------------------------------- -AC_DEFUN([PKG_PROG_PKG_CONFIG], -[m4_pattern_forbid([^_?PKG_[A-Z_]+$]) -m4_pattern_allow([^PKG_CONFIG(_PATH|_LIBDIR)?$]) -AC_ARG_VAR([PKG_CONFIG], [path to pkg-config utility]) -AC_ARG_VAR([PKG_CONFIG_PATH], [directories to add to pkg-config's search path]) -AC_ARG_VAR([PKG_CONFIG_LIBDIR], [path overriding pkg-config's built-in search path]) - -if test "x$ac_cv_env_PKG_CONFIG_set" != "xset"; then - AC_PATH_TOOL([PKG_CONFIG], [pkg-config]) -fi -if test -n "$PKG_CONFIG"; then - _pkg_min_version=m4_default([$1], [0.9.0]) - AC_MSG_CHECKING([pkg-config is at least version $_pkg_min_version]) - if $PKG_CONFIG --atleast-pkgconfig-version $_pkg_min_version; then - AC_MSG_RESULT([yes]) - else - AC_MSG_RESULT([no]) - PKG_CONFIG="" - fi -fi[]dnl -])# PKG_PROG_PKG_CONFIG - -# PKG_CHECK_EXISTS(MODULES, [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND]) -# -# Check to see whether a particular set of modules exists. Similar -# to PKG_CHECK_MODULES(), but does not set variables or print errors. 
-# -# Please remember that m4 expands AC_REQUIRE([PKG_PROG_PKG_CONFIG]) -# only at the first occurence in configure.ac, so if the first place -# it's called might be skipped (such as if it is within an "if", you -# have to call PKG_CHECK_EXISTS manually -# -------------------------------------------------------------- -AC_DEFUN([PKG_CHECK_EXISTS], -[AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl -if test -n "$PKG_CONFIG" && \ - AC_RUN_LOG([$PKG_CONFIG --exists --print-errors "$1"]); then - m4_default([$2], [:]) -m4_ifvaln([$3], [else - $3])dnl -fi]) - -# _PKG_CONFIG([VARIABLE], [COMMAND], [MODULES]) -# --------------------------------------------- -m4_define([_PKG_CONFIG], -[if test -n "$$1"; then - pkg_cv_[]$1="$$1" - elif test -n "$PKG_CONFIG"; then - PKG_CHECK_EXISTS([$3], - [pkg_cv_[]$1=`$PKG_CONFIG --[]$2 "$3" 2>/dev/null`], - [pkg_failed=yes]) - else - pkg_failed=untried -fi[]dnl -])# _PKG_CONFIG - -# _PKG_SHORT_ERRORS_SUPPORTED -# ----------------------------- -AC_DEFUN([_PKG_SHORT_ERRORS_SUPPORTED], -[AC_REQUIRE([PKG_PROG_PKG_CONFIG]) -if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then - _pkg_short_errors_supported=yes -else - _pkg_short_errors_supported=no -fi[]dnl -])# _PKG_SHORT_ERRORS_SUPPORTED - - -# PKG_CHECK_MODULES(VARIABLE-PREFIX, MODULES, [ACTION-IF-FOUND], -# [ACTION-IF-NOT-FOUND]) -# -# -# Note that if there is a possibility the first call to -# PKG_CHECK_MODULES might not happen, you should be sure to include an -# explicit call to PKG_PROG_PKG_CONFIG in your configure.ac -# -# -# -------------------------------------------------------------- -AC_DEFUN([PKG_CHECK_MODULES], -[AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl -AC_ARG_VAR([$1][_CFLAGS], [C compiler flags for $1, overriding pkg-config])dnl -AC_ARG_VAR([$1][_LIBS], [linker flags for $1, overriding pkg-config])dnl - -pkg_failed=no -AC_MSG_CHECKING([for $1]) - -_PKG_CONFIG([$1][_CFLAGS], [cflags], [$2]) -_PKG_CONFIG([$1][_LIBS], [libs], [$2]) - -m4_define([_PKG_TEXT], [Alternatively, you may set the 
environment variables $1[]_CFLAGS -and $1[]_LIBS to avoid the need to call pkg-config. -See the pkg-config man page for more details.]) - -if test $pkg_failed = yes; then - AC_MSG_RESULT([no]) - _PKG_SHORT_ERRORS_SUPPORTED - if test $_pkg_short_errors_supported = yes; then - $1[]_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors "$2" 2>&1` - else - $1[]_PKG_ERRORS=`$PKG_CONFIG --print-errors "$2" 2>&1` - fi - # Put the nasty error message in config.log where it belongs - echo "$$1[]_PKG_ERRORS" >&AS_MESSAGE_LOG_FD - - m4_default([$4], [AC_MSG_ERROR( -[Package requirements ($2) were not met: - -$$1_PKG_ERRORS - -Consider adjusting the PKG_CONFIG_PATH environment variable if you -installed software in a non-standard prefix. - -_PKG_TEXT])dnl - ]) -elif test $pkg_failed = untried; then - AC_MSG_RESULT([no]) - m4_default([$4], [AC_MSG_FAILURE( -[The pkg-config script could not be found or is too old. Make sure it -is in your PATH or set the PKG_CONFIG environment variable to the full -path to pkg-config. 
- -_PKG_TEXT - -To get pkg-config, see .])dnl - ]) -else - $1[]_CFLAGS=$pkg_cv_[]$1[]_CFLAGS - $1[]_LIBS=$pkg_cv_[]$1[]_LIBS - AC_MSG_RESULT([yes]) - $3 -fi[]dnl -])# PKG_CHECK_MODULES diff -Nru couchdb-1.2.0/Makefile.am couchdb-1.4.0~rc.1/Makefile.am --- couchdb-1.2.0/Makefile.am 2012-03-28 15:34:12.000000000 -0400 +++ couchdb-1.4.0~rc.1/Makefile.am 2013-08-23 10:57:21.000000000 -0400 @@ -17,13 +17,11 @@ localdoc_DATA = \ AUTHORS.gz \ BUGS.gz \ - CHANGES.gz \ DEVELOPERS.gz \ INSTALL.gz \ INSTALL.Unix.gz \ INSTALL.Windows.gz \ LICENSE.gz \ - NEWS.gz \ NOTICE.gz \ README.gz \ THANKS.gz @@ -33,67 +31,79 @@ EXTRA_DIST = \ AUTHORS \ BUGS \ - CHANGES \ DEVELOPERS \ INSTALL \ INSTALL.Unix \ INSTALL.Windows \ LICENSE \ - NEWS \ NOTICE \ - README \ + README.rst \ THANKS \ + build-aux/dist-error \ + build-aux/sphinx-build \ + build-aux/sphinx-touch \ license.skip -AUTHORS.gz: $(top_srcdir)/AUTHORS - -gzip -9 < $< > $@ +AUTHORS.gz: AUTHORS + gzip -9 < $< > $@ -BUGS.gz: $(top_srcdir)/BUGS - -gzip -9 < $< > $@ +BUGS.gz: BUGS + gzip -9 < $< > $@ -CHANGES.gz: $(top_srcdir)/CHANGES - -gzip -9 < $< > $@ +DEVELOPERS.gz: DEVELOPERS + gzip -9 < $< > $@ -DEVELOPERS.gz: $(top_srcdir)/DEVELOPERS - -gzip -9 < $< > $@ +INSTALL.gz: INSTALL + gzip -9 < $< > $@ -INSTALL.gz: $(top_srcdir)/INSTALL - -gzip -9 < $< > $@ +INSTALL.Unix.gz: INSTALL.Unix + gzip -9 < $< > $@ -INSTALL.Unix.gz: $(top_srcdir)/INSTALL.Unix - -gzip -9 < $< > $@ +INSTALL.Windows.gz: INSTALL.Windows + gzip -9 < $< > $@ -INSTALL.Windows.gz: $(top_srcdir)/INSTALL.Windows - -gzip -9 < $< > $@ +LICENSE.gz: LICENSE + gzip -9 < $< > $@ -LICENSE.gz: $(top_srcdir)/LICENSE - -gzip -9 < $< > $@ +NOTICE.gz: NOTICE + gzip -9 < $< > $@ -NEWS.gz: $(top_srcdir)/NEWS - -gzip -9 < $< > $@ +README.gz: README.rst + gzip -9 < $< > $@ -NOTICE.gz: $(top_srcdir)/NOTICE - -gzip -9 < $< > $@ +THANKS.gz: THANKS + gzip -9 < $< > $@ -README.gz: $(top_srcdir)/README - -gzip -9 < $< > $@ +check: dev check-js +if TESTS + $(top_builddir)/test/etap/run 
$(top_srcdir)/test/etap +endif -THANKS.gz: $(top_srcdir)/THANKS - -gzip -9 < $< > $@ +check-js: dev +if TESTS +if USE_CURL + MAKE=$(MAKE) $(top_builddir)/test/javascript/run +endif +endif -check: dev - $(top_builddir)/test/etap/run +check-etap: dev +if TESTS + $(top_builddir)/test/etap/run $(top_srcdir)/test/etap +endif cover: dev +if TESTS rm -f cover/*.coverdata COVER=1 COVER_BIN=./src/couchdb/ $(top_builddir)/test/etap/run SRC=./src/couchdb/ \ $(ERL) -noshell \ - -pa src/etap \ - -eval 'etap_report:create()' \ + -pa src/etap \ + -eval 'etap_report:create()' \ -s init stop > /dev/null 2>&1 +endif dev: all +if TESTS @echo "This command is intended for developers to use;" @echo "it creates development ini files as well as a" @echo "$(top_builddir)/tmp structure for development runtime files." @@ -103,6 +113,7 @@ mkdir -p $(top_builddir)/tmp/lib mkdir -p $(top_builddir)/tmp/log mkdir -p $(top_builddir)/tmp/run/couchdb +endif install-data-hook: @echo @@ -112,6 +123,8 @@ rm -fr $(top_builddir)/etc/couchdb/default.d rm -fr $(top_builddir)/etc/couchdb/local.d rm -fr $(top_builddir)/tmp + rm -f $(top_builddir)/couchdb.stdout + rm -f $(top_builddir)/couchdb.stderr .PHONY: local-clean local-clean: maintainer-clean @@ -132,18 +145,9 @@ find $(top_builddir) -type f -name "._*" -exec rm -f {} \; distcheck-hook: + $(MAKE) -C bin distcheck-hook + $(MAKE) -C src/couchdb/priv distcheck-hook + $(MAKE) -C share/doc/build distcheck-hook grep -rL 'http://www.apache.org/licenses/LICENSE-2.0' * \ - | grep -vEf license.skip; \ - test "$$?" -eq 1 - -.PHONY: distsign -distsign: distcheck check - @# @@ unpack archive and run diff -r to double check missing files - @# @@ does automake have anything that does this? 
- gpg --armor --detach-sig --default-key 8FBFCFBF \ - < $(top_srcdir)/$(distdir).tar.gz \ - > $(top_srcdir)/$(distdir).tar.gz.asc - md5sum $(top_srcdir)/$(distdir).tar.gz \ - > $(top_srcdir)/$(distdir).tar.gz.md5 - sha1sum $(top_srcdir)/$(distdir).tar.gz \ - > $(top_srcdir)/$(distdir).tar.gz.sha + | grep --include= -vEf $(top_srcdir)/license.skip; \ + test "$$?" -eq 1 diff -Nru couchdb-1.2.0/Makefile.in couchdb-1.4.0~rc.1/Makefile.in --- couchdb-1.2.0/Makefile.in 2012-03-29 17:05:39.000000000 -0400 +++ couchdb-1.4.0~rc.1/Makefile.in 2013-08-23 10:57:43.000000000 -0400 @@ -1,8 +1,9 @@ -# Makefile.in generated by automake 1.10 from Makefile.am. +# Makefile.in generated by automake 1.11.6 from Makefile.am. # @configure_input@ # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, -# 2003, 2004, 2005, 2006 Free Software Foundation, Inc. +# 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 Free Software +# Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. @@ -15,9 +16,27 @@ @SET_MAKE@ VPATH = @srcdir@ +am__make_dryrun = \ + { \ + am__dry=no; \ + case $$MAKEFLAGS in \ + *\\[\ \ ]*) \ + echo 'am--echo: ; @echo "AM" OK' | $(MAKE) -f - 2>/dev/null \ + | grep '^AM OK$$' >/dev/null || am__dry=yes;; \ + *) \ + for am__flg in $$MAKEFLAGS; do \ + case $$am__flg in \ + *=*|--*) ;; \ + *n*) am__dry=yes; break;; \ + esac; \ + done;; \ + esac; \ + test $$am__dry = yes; \ + } pkgdatadir = $(datadir)/@PACKAGE@ -pkglibdir = $(libdir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ +pkglibdir = $(libdir)/@PACKAGE@ +pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c @@ -33,21 +52,19 @@ build_triplet = @build@ host_triplet = @host@ subdir = . 
-DIST_COMMON = README $(am__configure_deps) $(srcdir)/Makefile.am \ +DIST_COMMON = $(am__configure_deps) $(srcdir)/Makefile.am \ $(srcdir)/Makefile.in $(srcdir)/config.h.in \ $(top_srcdir)/configure \ $(top_srcdir)/src/snappy/google-snappy/config.h.in \ $(top_srcdir)/src/snappy/google-snappy/snappy-stubs-public.h.in \ - AUTHORS NEWS THANKS build-aux/config.guess \ - build-aux/config.sub build-aux/depcomp build-aux/install-sh \ - build-aux/ltmain.sh build-aux/missing + AUTHORS THANKS build-aux/config.guess build-aux/config.sub \ + build-aux/depcomp build-aux/install-sh build-aux/ltmain.sh \ + build-aux/missing ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 -am__aclocal_m4_deps = $(top_srcdir)/m4/ac_check_curl.m4 \ - $(top_srcdir)/m4/ac_check_icu.m4 $(top_srcdir)/m4/libtool.m4 \ +am__aclocal_m4_deps = $(top_srcdir)/m4/libtool.m4 \ $(top_srcdir)/m4/ltoptions.m4 $(top_srcdir)/m4/ltsugar.m4 \ $(top_srcdir)/m4/ltversion.m4 $(top_srcdir)/m4/lt~obsolete.m4 \ - $(top_srcdir)/m4/pkg.m4 $(top_srcdir)/acinclude.m4 \ - $(top_srcdir)/configure.ac + $(top_srcdir)/acinclude.m4 $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \ @@ -56,6 +73,7 @@ CONFIG_HEADER = config.h \ $(top_builddir)/src/snappy/google-snappy/config.h CONFIG_CLEAN_FILES = src/snappy/google-snappy/snappy-stubs-public.h +CONFIG_CLEAN_VPATH_FILES = SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive dvi-recursive \ @@ -65,17 +83,45 @@ install-pdf-recursive install-ps-recursive install-recursive \ installcheck-recursive installdirs-recursive pdf-recursive \ ps-recursive uninstall-recursive +am__can_run_installinfo = \ + case $$AM_UPDATE_INFO_DIR in \ + n|no|NO) false;; \ + *) (install-info --version) >/dev/null 2>&1;; \ + esac am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed 
"s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; -am__strip_dir = `echo $$p | sed -e 's|^.*/||'`; +am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; +am__install_max = 40 +am__nobase_strip_setup = \ + srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` +am__nobase_strip = \ + for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" +am__nobase_list = $(am__nobase_strip_setup); \ + for p in $$list; do echo "$$p $$p"; done | \ + sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ + $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ + if (++n[$$2] == $(am__install_max)) \ + { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ + END { for (dir in files) print dir, files[dir] }' +am__base_list = \ + sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ + sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' +am__uninstall_files_from_dir = { \ + test -z "$$files" \ + || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ + || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ + $(am__cd) "$$dir" && rm -f $$files; }; \ + } am__installdirs = "$(DESTDIR)$(localdocdir)" -localdocDATA_INSTALL = $(INSTALL_DATA) DATA = $(localdoc_DATA) RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive +AM_RECURSIVE_TARGETS = $(RECURSIVE_TARGETS:-recursive=) \ + $(RECURSIVE_CLEAN_TARGETS:-recursive=) tags TAGS ctags CTAGS \ + distdir dist dist-all distcheck ETAGS = etags CTAGS = ctags DIST_SUBDIRS = $(SUBDIRS) @@ -83,12 +129,41 @@ distdir = $(PACKAGE)-$(VERSION) top_distdir = $(distdir) am__remove_distdir = \ - { test ! -d $(distdir) \ - || { find $(distdir) -type d ! -perm -200 -exec chmod u+w {} ';' \ - && rm -fr $(distdir); }; } + if test -d "$(distdir)"; then \ + find "$(distdir)" -type d ! 
-perm -200 -exec chmod u+w {} ';' \ + && rm -rf "$(distdir)" \ + || { sleep 5 && rm -rf "$(distdir)"; }; \ + else :; fi +am__relativize = \ + dir0=`pwd`; \ + sed_first='s,^\([^/]*\)/.*$$,\1,'; \ + sed_rest='s,^[^/]*/*,,'; \ + sed_last='s,^.*/\([^/]*\)$$,\1,'; \ + sed_butlast='s,/*[^/]*$$,,'; \ + while test -n "$$dir1"; do \ + first=`echo "$$dir1" | sed -e "$$sed_first"`; \ + if test "$$first" != "."; then \ + if test "$$first" = ".."; then \ + dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ + dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ + else \ + first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ + if test "$$first2" = "$$first"; then \ + dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ + else \ + dir2="../$$dir2"; \ + fi; \ + dir0="$$dir0"/"$$first"; \ + fi; \ + fi; \ + dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ + done; \ + reldir="$$dir2" DIST_ARCHIVES = $(distdir).tar.gz GZIP_ENV = --best distuninstallcheck_listfiles = find . -type f -print +am__distuninstallcheck_listfiles = $(distuninstallcheck_listfiles) \ + | sed 's|^\./|$(prefix)/|' | grep -v '$(infodir)/dir$$' distcleancheck_listfiles = find . 
-type f -print ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ @@ -128,7 +203,11 @@ FGREP = @FGREP@ FLAGS = @FLAGS@ GREP = @GREP@ -HELP2MAN_EXECUTABLE = @HELP2MAN_EXECUTABLE@ +HAS_HELP2MAN = @HAS_HELP2MAN@ +HAS_INSTALLINFO = @HAS_INSTALLINFO@ +HAS_MAKEINFO = @HAS_MAKEINFO@ +HAS_PDFLATEX = @HAS_PDFLATEX@ +HAS_SPHINX_BUILD = @HAS_SPHINX_BUILD@ ICU_BIN = @ICU_BIN@ ICU_CFLAGS = @ICU_CFLAGS@ ICU_CONFIG = @ICU_CONFIG@ @@ -141,10 +220,7 @@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ -JS185_CFLAGS = @JS185_CFLAGS@ -JS185_LIBS = @JS185_LIBS@ JS_CFLAGS = @JS_CFLAGS@ -JS_LDFLAGS = @JS_LDFLAGS@ JS_LIBS = @JS_LIBS@ JS_LIB_BINARY = @JS_LIB_BINARY@ LD = @LD@ @@ -156,6 +232,7 @@ LN_S = @LN_S@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ +MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ NM = @NM@ NMEDIT = @NMEDIT@ @@ -168,6 +245,7 @@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ +PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PKG_CONFIG = @PKG_CONFIG@ @@ -186,6 +264,7 @@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ +ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ @@ -254,6 +333,7 @@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ +top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ version = @version@ @@ -267,13 +347,11 @@ localdoc_DATA = \ AUTHORS.gz \ BUGS.gz \ - CHANGES.gz \ DEVELOPERS.gz \ INSTALL.gz \ INSTALL.Unix.gz \ INSTALL.Windows.gz \ LICENSE.gz \ - NEWS.gz \ NOTICE.gz \ README.gz \ THANKS.gz @@ -282,37 +360,38 @@ EXTRA_DIST = \ AUTHORS \ BUGS \ - CHANGES \ DEVELOPERS \ INSTALL \ INSTALL.Unix \ INSTALL.Windows \ LICENSE \ - NEWS \ NOTICE \ - README \ + README.rst \ THANKS \ + build-aux/dist-error \ + build-aux/sphinx-build \ + 
build-aux/sphinx-touch \ license.skip all: config.h $(MAKE) $(AM_MAKEFLAGS) all-recursive .SUFFIXES: -am--refresh: +am--refresh: Makefile @: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ - echo ' cd $(srcdir) && $(AUTOMAKE) --foreign '; \ - cd $(srcdir) && $(AUTOMAKE) --foreign \ + echo ' cd $(srcdir) && $(AUTOMAKE) --foreign'; \ + $(am__cd) $(srcdir) && $(AUTOMAKE) --foreign \ && exit 0; \ exit 1;; \ esac; \ done; \ - echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign Makefile'; \ - cd $(top_srcdir) && \ - $(AUTOMAKE) --foreign Makefile + echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign Makefile'; \ + $(am__cd) $(top_srcdir) && \ + $(AUTOMAKE) --foreign Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ @@ -328,29 +407,26 @@ $(SHELL) ./config.status --recheck $(top_srcdir)/configure: $(am__configure_deps) - cd $(srcdir) && $(AUTOCONF) + $(am__cd) $(srcdir) && $(AUTOCONF) $(ACLOCAL_M4): $(am__aclocal_m4_deps) - cd $(srcdir) && $(ACLOCAL) $(ACLOCAL_AMFLAGS) + $(am__cd) $(srcdir) && $(ACLOCAL) $(ACLOCAL_AMFLAGS) +$(am__aclocal_m4_deps): config.h: stamp-h1 - @if test ! -f $@; then \ - rm -f stamp-h1; \ - $(MAKE) $(AM_MAKEFLAGS) stamp-h1; \ - else :; fi + @if test ! -f $@; then rm -f stamp-h1; else :; fi + @if test ! -f $@; then $(MAKE) $(AM_MAKEFLAGS) stamp-h1; else :; fi stamp-h1: $(srcdir)/config.h.in $(top_builddir)/config.status @rm -f stamp-h1 cd $(top_builddir) && $(SHELL) ./config.status config.h $(srcdir)/config.h.in: $(am__configure_deps) - cd $(top_srcdir) && $(AUTOHEADER) + ($(am__cd) $(top_srcdir) && $(AUTOHEADER)) rm -f stamp-h1 touch $@ src/snappy/google-snappy/config.h: src/snappy/google-snappy/stamp-h2 - @if test ! -f $@; then \ - rm -f src/snappy/google-snappy/stamp-h2; \ - $(MAKE) $(AM_MAKEFLAGS) src/snappy/google-snappy/stamp-h2; \ - else :; fi + @if test ! 
-f $@; then rm -f src/snappy/google-snappy/stamp-h2; else :; fi + @if test ! -f $@; then $(MAKE) $(AM_MAKEFLAGS) src/snappy/google-snappy/stamp-h2; else :; fi src/snappy/google-snappy/stamp-h2: $(top_srcdir)/src/snappy/google-snappy/config.h.in $(top_builddir)/config.status @rm -f src/snappy/google-snappy/stamp-h2 @@ -368,24 +444,28 @@ -rm -rf .libs _libs distclean-libtool: - -rm -f libtool + -rm -f libtool config.lt install-localdocDATA: $(localdoc_DATA) @$(NORMAL_INSTALL) - test -z "$(localdocdir)" || $(MKDIR_P) "$(DESTDIR)$(localdocdir)" - @list='$(localdoc_DATA)'; for p in $$list; do \ + @list='$(localdoc_DATA)'; test -n "$(localdocdir)" || list=; \ + if test -n "$$list"; then \ + echo " $(MKDIR_P) '$(DESTDIR)$(localdocdir)'"; \ + $(MKDIR_P) "$(DESTDIR)$(localdocdir)" || exit 1; \ + fi; \ + for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ - f=$(am__strip_dir) \ - echo " $(localdocDATA_INSTALL) '$$d$$p' '$(DESTDIR)$(localdocdir)/$$f'"; \ - $(localdocDATA_INSTALL) "$$d$$p" "$(DESTDIR)$(localdocdir)/$$f"; \ + echo "$$d$$p"; \ + done | $(am__base_list) | \ + while read files; do \ + echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(localdocdir)'"; \ + $(INSTALL_DATA) $$files "$(DESTDIR)$(localdocdir)" || exit $$?; \ done uninstall-localdocDATA: @$(NORMAL_UNINSTALL) - @list='$(localdoc_DATA)'; for p in $$list; do \ - f=$(am__strip_dir) \ - echo " rm -f '$(DESTDIR)$(localdocdir)/$$f'"; \ - rm -f "$(DESTDIR)$(localdocdir)/$$f"; \ - done + @list='$(localdoc_DATA)'; test -n "$(localdocdir)" || list=; \ + files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ + dir='$(DESTDIR)$(localdocdir)'; $(am__uninstall_files_from_dir) # This directory's subdirectories are mostly independent; you can cd # into them and run `make' without going through this Makefile. @@ -394,7 +474,7 @@ # (which will cause the Makefiles to be regenerated when you run `make'); # (2) otherwise, pass the desired values on the `make' command line. 
$(RECURSIVE_TARGETS): - @failcom='exit 1'; \ + @fail= failcom='exit 1'; \ for f in x $$MAKEFLAGS; do \ case $$f in \ *=* | --[!k]*);; \ @@ -411,7 +491,7 @@ else \ local_target="$$target"; \ fi; \ - (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ + ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ @@ -419,7 +499,7 @@ fi; test -z "$$fail" $(RECURSIVE_CLEAN_TARGETS): - @failcom='exit 1'; \ + @fail= failcom='exit 1'; \ for f in x $$MAKEFLAGS; do \ case $$f in \ *=* | --[!k]*);; \ @@ -445,16 +525,16 @@ else \ local_target="$$target"; \ fi; \ - (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ + ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done && test -z "$$fail" tags-recursive: list='$(SUBDIRS)'; for subdir in $$list; do \ - test "$$subdir" = . || (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) tags); \ + test "$$subdir" = . || ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) tags); \ done ctags-recursive: list='$(SUBDIRS)'; for subdir in $$list; do \ - test "$$subdir" = . || (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) ctags); \ + test "$$subdir" = . || ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) ctags); \ done ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) @@ -462,14 +542,14 @@ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ - $(AWK) ' { files[$$0] = 1; } \ - END { for (i in files) print i; }'`; \ + $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ + END { if (nonempty) { for (i in files) print i; }; }'`; \ mkid -fID $$unique tags: TAGS TAGS: tags-recursive $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) - tags=; \ + set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ @@ -481,46 +561,50 @@ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! 
-f $$subdir/TAGS || \ - tags="$$tags $$include_option=$$here/$$subdir/TAGS"; \ + set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ list='$(SOURCES) $(HEADERS) config.h.in $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ - $(AWK) ' { files[$$0] = 1; } \ - END { for (i in files) print i; }'`; \ - if test -z "$(ETAGS_ARGS)$$tags$$unique"; then :; else \ + $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ + END { if (nonempty) { for (i in files) print i; }; }'`; \ + shift; \ + if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ - $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ - $$tags $$unique; \ + if test $$# -gt 0; then \ + $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ + "$$@" $$unique; \ + else \ + $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ + $$unique; \ + fi; \ fi ctags: CTAGS CTAGS: ctags-recursive $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) - tags=; \ - here=`pwd`; \ list='$(SOURCES) $(HEADERS) config.h.in $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ - $(AWK) ' { files[$$0] = 1; } \ - END { for (i in files) print i; }'`; \ - test -z "$(CTAGS_ARGS)$$tags$$unique" \ + $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ + END { if (nonempty) { for (i in files) print i; }; }'`; \ + test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ - $$tags $$unique + $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ - && cd $(top_srcdir) \ - && gtags -i $(GTAGS_ARGS) $$here + && $(am__cd) $(top_srcdir) \ + && gtags -i $(GTAGS_ARGS) "$$here" distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) $(am__remove_distdir) - test -d $(distdir) || mkdir $(distdir) + test -d "$(distdir)" || mkdir "$(distdir)" @srcdirstrip=`echo "$(srcdir)" 
| sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ @@ -536,29 +620,41 @@ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ + if test -d "$(distdir)/$$file"; then \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ + fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ - cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \ + cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ + find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ - cp -pR $$d/$$file $(distdir)$$dir || exit 1; \ + cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ - test -f $(distdir)/$$file \ - || cp -p $$d/$$file $(distdir)/$$file \ + test -f "$(distdir)/$$file" \ + || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done - list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ + @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ - test -d "$(distdir)/$$subdir" \ - || $(MKDIR_P) "$(distdir)/$$subdir" \ - || exit 1; \ - distdir=`$(am__cd) $(distdir) && pwd`; \ - top_distdir=`$(am__cd) $(top_distdir) && pwd`; \ - (cd $$subdir && \ + $(am__make_dryrun) \ + || test -d "$(distdir)/$$subdir" \ + || $(MKDIR_P) "$(distdir)/$$subdir" \ + || exit 1; \ + dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ + $(am__relativize); \ + new_distdir=$$reldir; \ + dir1=$$subdir; dir2="$(top_distdir)"; \ + $(am__relativize); \ + new_top_distdir=$$reldir; \ + echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ + echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ + ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ - top_distdir="$$top_distdir" \ - distdir="$$distdir/$$subdir" \ + top_distdir="$$new_top_distdir" \ + distdir="$$new_distdir" \ 
am__remove_distdir=: \ am__skip_length_check=: \ + am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ @@ -566,17 +662,31 @@ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$(top_distdir)" distdir="$(distdir)" \ dist-hook - -find $(distdir) -type d ! -perm -755 -exec chmod u+rwx,go+rx {} \; -o \ + -test -n "$(am__skip_mode_fix)" \ + || find "$(distdir)" -type d ! -perm -755 \ + -exec chmod u+rwx,go+rx {} \; -o \ ! -type d ! -perm -444 -links 1 -exec chmod a+r {} \; -o \ ! -type d ! -perm -400 -exec chmod a+r {} \; -o \ ! -type d ! -perm -444 -exec $(install_sh) -c -m a+r {} {} \; \ - || chmod -R a+r $(distdir) + || chmod -R a+r "$(distdir)" dist-gzip: distdir tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz $(am__remove_distdir) dist-bzip2: distdir - tardir=$(distdir) && $(am__tar) | bzip2 -9 -c >$(distdir).tar.bz2 + tardir=$(distdir) && $(am__tar) | BZIP2=$${BZIP2--9} bzip2 -c >$(distdir).tar.bz2 + $(am__remove_distdir) + +dist-lzip: distdir + tardir=$(distdir) && $(am__tar) | lzip -c $${LZIP_OPT--9} >$(distdir).tar.lz + $(am__remove_distdir) + +dist-lzma: distdir + tardir=$(distdir) && $(am__tar) | lzma -9 -c >$(distdir).tar.lzma + $(am__remove_distdir) + +dist-xz: distdir + tardir=$(distdir) && $(am__tar) | XZ_OPT=$${XZ_OPT--e} xz -c >$(distdir).tar.xz $(am__remove_distdir) dist-tarZ: distdir @@ -602,25 +712,34 @@ distcheck: dist case '$(DIST_ARCHIVES)' in \ *.tar.gz*) \ - GZIP=$(GZIP_ENV) gunzip -c $(distdir).tar.gz | $(am__untar) ;;\ + GZIP=$(GZIP_ENV) gzip -dc $(distdir).tar.gz | $(am__untar) ;;\ *.tar.bz2*) \ - bunzip2 -c $(distdir).tar.bz2 | $(am__untar) ;;\ + bzip2 -dc $(distdir).tar.bz2 | $(am__untar) ;;\ + *.tar.lzma*) \ + lzma -dc $(distdir).tar.lzma | $(am__untar) ;;\ + *.tar.lz*) \ + lzip -dc $(distdir).tar.lz | $(am__untar) ;;\ + *.tar.xz*) \ + xz -dc $(distdir).tar.xz | $(am__untar) ;;\ *.tar.Z*) \ uncompress -c $(distdir).tar.Z | $(am__untar) ;;\ *.shar.gz*) \ - GZIP=$(GZIP_ENV) gunzip -c $(distdir).shar.gz | unshar ;;\ + 
GZIP=$(GZIP_ENV) gzip -dc $(distdir).shar.gz | unshar ;;\ *.zip*) \ unzip $(distdir).zip ;;\ esac - chmod -R a-w $(distdir); chmod a+w $(distdir) + chmod -R a-w $(distdir); chmod u+w $(distdir) mkdir $(distdir)/_build mkdir $(distdir)/_inst chmod a-w $(distdir) + test -d $(distdir)/_build || exit 0; \ dc_install_base=`$(am__cd) $(distdir)/_inst && pwd | sed -e 's,^[^:\\/]:[\\/],/,'` \ && dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \ && $(MAKE) $(AM_MAKEFLAGS) distcheck-hook \ - && cd $(distdir)/_build \ + && am__cwd=`pwd` \ + && $(am__cd) $(distdir)/_build \ && ../configure --srcdir=.. --prefix="$$dc_install_base" \ + $(AM_DISTCHECK_CONFIGURE_FLAGS) \ $(DISTCHECK_CONFIGURE_FLAGS) \ && $(MAKE) $(AM_MAKEFLAGS) \ && $(MAKE) $(AM_MAKEFLAGS) dvi \ @@ -641,14 +760,24 @@ && rm -rf "$$dc_destdir" \ && $(MAKE) $(AM_MAKEFLAGS) dist \ && rm -rf $(DIST_ARCHIVES) \ - && $(MAKE) $(AM_MAKEFLAGS) distcleancheck + && $(MAKE) $(AM_MAKEFLAGS) distcleancheck \ + && cd "$$am__cwd" \ + || exit 1 $(am__remove_distdir) @(echo "$(distdir) archives ready for distribution: "; \ list='$(DIST_ARCHIVES)'; for i in $$list; do echo $$i; done) | \ sed -e 1h -e 1s/./=/g -e 1p -e 1x -e '$$p' -e '$$x' distuninstallcheck: - @cd $(distuninstallcheck_dir) \ - && test `$(distuninstallcheck_listfiles) | wc -l` -le 1 \ + @test -n '$(distuninstallcheck_dir)' || { \ + echo 'ERROR: trying to run $@ with an empty' \ + '$$(distuninstallcheck_dir)' >&2; \ + exit 1; \ + }; \ + $(am__cd) '$(distuninstallcheck_dir)' || { \ + echo 'ERROR: cannot chdir into $(distuninstallcheck_dir)' >&2; \ + exit 1; \ + }; \ + test `$(am__distuninstallcheck_listfiles) | wc -l` -eq 0 \ || { echo "ERROR: files left after uninstall:" ; \ if test -n "$(DESTDIR)"; then \ echo " (check DESTDIR support)"; \ @@ -682,16 +811,22 @@ installcheck: installcheck-recursive install-strip: - $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ - install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ - `test -z '$(STRIP)' 
|| \ - echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install + if test -z '$(STRIP)'; then \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + install; \ + else \ + $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ + install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ + "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ + fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) + -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) -test -z "$(DISTCLEANFILES)" || rm -f $(DISTCLEANFILES) maintainer-clean-generic: @@ -713,6 +848,8 @@ html: html-recursive +html-am: + info: info-recursive info-am: @@ -720,21 +857,30 @@ install-data-am: install-localdocDATA @$(NORMAL_INSTALL) $(MAKE) $(AM_MAKEFLAGS) install-data-hook - install-dvi: install-dvi-recursive +install-dvi-am: + install-exec-am: install-html: install-html-recursive +install-html-am: + install-info: install-info-recursive +install-info-am: + install-man: install-pdf: install-pdf-recursive +install-pdf-am: + install-ps: install-ps-recursive +install-ps-am: + installcheck-am: maintainer-clean: maintainer-clean-recursive @@ -757,86 +903,87 @@ uninstall-am: uninstall-localdocDATA -.MAKE: $(RECURSIVE_CLEAN_TARGETS) $(RECURSIVE_TARGETS) install-am \ - install-data-am install-strip +.MAKE: $(RECURSIVE_CLEAN_TARGETS) $(RECURSIVE_TARGETS) all \ + ctags-recursive install-am install-data-am install-strip \ + tags-recursive .PHONY: $(RECURSIVE_CLEAN_TARGETS) $(RECURSIVE_TARGETS) CTAGS GTAGS \ all all-am am--refresh check check-am clean clean-generic \ clean-libtool ctags ctags-recursive dist dist-all dist-bzip2 \ - dist-gzip dist-hook dist-shar dist-tarZ dist-zip distcheck \ - distclean distclean-generic distclean-hdr distclean-libtool \ - distclean-local distclean-tags 
distcleancheck distdir \ - distuninstallcheck dvi dvi-am html html-am info info-am \ - install install-am install-data install-data-am \ - install-data-hook install-dvi install-dvi-am install-exec \ - install-exec-am install-html install-html-am install-info \ - install-info-am install-localdocDATA install-man install-pdf \ - install-pdf-am install-ps install-ps-am install-strip \ - installcheck installcheck-am installdirs installdirs-am \ - maintainer-clean maintainer-clean-generic mostlyclean \ - mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ - tags tags-recursive uninstall uninstall-am \ + dist-gzip dist-hook dist-lzip dist-lzma dist-shar dist-tarZ \ + dist-xz dist-zip distcheck distclean distclean-generic \ + distclean-hdr distclean-libtool distclean-local distclean-tags \ + distcleancheck distdir distuninstallcheck dvi dvi-am html \ + html-am info info-am install install-am install-data \ + install-data-am install-data-hook install-dvi install-dvi-am \ + install-exec install-exec-am install-html install-html-am \ + install-info install-info-am install-localdocDATA install-man \ + install-pdf install-pdf-am install-ps install-ps-am \ + install-strip installcheck installcheck-am installdirs \ + installdirs-am maintainer-clean maintainer-clean-generic \ + mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \ + ps ps-am tags tags-recursive uninstall uninstall-am \ uninstall-localdocDATA -AUTHORS.gz: $(top_srcdir)/AUTHORS - -gzip -9 < $< > $@ +AUTHORS.gz: AUTHORS + gzip -9 < $< > $@ -BUGS.gz: $(top_srcdir)/BUGS - -gzip -9 < $< > $@ +BUGS.gz: BUGS + gzip -9 < $< > $@ -CHANGES.gz: $(top_srcdir)/CHANGES - -gzip -9 < $< > $@ +DEVELOPERS.gz: DEVELOPERS + gzip -9 < $< > $@ -DEVELOPERS.gz: $(top_srcdir)/DEVELOPERS - -gzip -9 < $< > $@ +INSTALL.gz: INSTALL + gzip -9 < $< > $@ -INSTALL.gz: $(top_srcdir)/INSTALL - -gzip -9 < $< > $@ +INSTALL.Unix.gz: INSTALL.Unix + gzip -9 < $< > $@ -INSTALL.Unix.gz: $(top_srcdir)/INSTALL.Unix - -gzip -9 < $< > $@ 
+INSTALL.Windows.gz: INSTALL.Windows + gzip -9 < $< > $@ -INSTALL.Windows.gz: $(top_srcdir)/INSTALL.Windows - -gzip -9 < $< > $@ +LICENSE.gz: LICENSE + gzip -9 < $< > $@ -LICENSE.gz: $(top_srcdir)/LICENSE - -gzip -9 < $< > $@ +NOTICE.gz: NOTICE + gzip -9 < $< > $@ -NEWS.gz: $(top_srcdir)/NEWS - -gzip -9 < $< > $@ +README.gz: README.rst + gzip -9 < $< > $@ -NOTICE.gz: $(top_srcdir)/NOTICE - -gzip -9 < $< > $@ +THANKS.gz: THANKS + gzip -9 < $< > $@ -README.gz: $(top_srcdir)/README - -gzip -9 < $< > $@ +check: dev check-js +@TESTS_TRUE@ $(top_builddir)/test/etap/run $(top_srcdir)/test/etap -THANKS.gz: $(top_srcdir)/THANKS - -gzip -9 < $< > $@ +check-js: dev +@TESTS_TRUE@@USE_CURL_TRUE@ MAKE=$(MAKE) $(top_builddir)/test/javascript/run -check: dev - $(top_builddir)/test/etap/run +check-etap: dev +@TESTS_TRUE@ $(top_builddir)/test/etap/run $(top_srcdir)/test/etap cover: dev - rm -f cover/*.coverdata - COVER=1 COVER_BIN=./src/couchdb/ $(top_builddir)/test/etap/run - SRC=./src/couchdb/ \ - $(ERL) -noshell \ - -pa src/etap \ - -eval 'etap_report:create()' \ - -s init stop > /dev/null 2>&1 +@TESTS_TRUE@ rm -f cover/*.coverdata +@TESTS_TRUE@ COVER=1 COVER_BIN=./src/couchdb/ $(top_builddir)/test/etap/run +@TESTS_TRUE@ SRC=./src/couchdb/ \ +@TESTS_TRUE@ $(ERL) -noshell \ +@TESTS_TRUE@ -pa src/etap \ +@TESTS_TRUE@ -eval 'etap_report:create()' \ +@TESTS_TRUE@ -s init stop > /dev/null 2>&1 dev: all - @echo "This command is intended for developers to use;" - @echo "it creates development ini files as well as a" - @echo "$(top_builddir)/tmp structure for development runtime files." - @echo "Use ./utils/run to launch CouchDB from the source tree." 
- mkdir -p $(top_builddir)/etc/couchdb/default.d - mkdir -p $(top_builddir)/etc/couchdb/local.d - mkdir -p $(top_builddir)/tmp/lib - mkdir -p $(top_builddir)/tmp/log - mkdir -p $(top_builddir)/tmp/run/couchdb +@TESTS_TRUE@ @echo "This command is intended for developers to use;" +@TESTS_TRUE@ @echo "it creates development ini files as well as a" +@TESTS_TRUE@ @echo "$(top_builddir)/tmp structure for development runtime files." +@TESTS_TRUE@ @echo "Use ./utils/run to launch CouchDB from the source tree." +@TESTS_TRUE@ mkdir -p $(top_builddir)/etc/couchdb/default.d +@TESTS_TRUE@ mkdir -p $(top_builddir)/etc/couchdb/local.d +@TESTS_TRUE@ mkdir -p $(top_builddir)/tmp/lib +@TESTS_TRUE@ mkdir -p $(top_builddir)/tmp/log +@TESTS_TRUE@ mkdir -p $(top_builddir)/tmp/run/couchdb install-data-hook: @echo @@ -846,6 +993,8 @@ rm -fr $(top_builddir)/etc/couchdb/default.d rm -fr $(top_builddir)/etc/couchdb/local.d rm -fr $(top_builddir)/tmp + rm -f $(top_builddir)/couchdb.stdout + rm -f $(top_builddir)/couchdb.stderr .PHONY: local-clean local-clean: maintainer-clean @@ -866,21 +1015,13 @@ find $(top_builddir) -type f -name "._*" -exec rm -f {} \; distcheck-hook: + $(MAKE) -C bin distcheck-hook + $(MAKE) -C src/couchdb/priv distcheck-hook + $(MAKE) -C share/doc/build distcheck-hook grep -rL 'http://www.apache.org/licenses/LICENSE-2.0' * \ - | grep -vEf license.skip; \ - test "$$?" -eq 1 + | grep --include= -vEf $(top_srcdir)/license.skip; \ + test "$$?" -eq 1 -.PHONY: distsign -distsign: distcheck check - @# @@ unpack archive and run diff -r to double check missing files - @# @@ does automake have anything that does this? - gpg --armor --detach-sig --default-key 8FBFCFBF \ - < $(top_srcdir)/$(distdir).tar.gz \ - > $(top_srcdir)/$(distdir).tar.gz.asc - md5sum $(top_srcdir)/$(distdir).tar.gz \ - > $(top_srcdir)/$(distdir).tar.gz.md5 - sha1sum $(top_srcdir)/$(distdir).tar.gz \ - > $(top_srcdir)/$(distdir).tar.gz.sha # Tell versions [3.59,3.63) of GNU make to not export all variables. 
# Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: diff -Nru couchdb-1.2.0/NEWS couchdb-1.4.0~rc.1/NEWS --- couchdb-1.2.0/NEWS 2012-03-28 15:34:12.000000000 -0400 +++ couchdb-1.4.0~rc.1/NEWS 1969-12-31 19:00:00.000000000 -0500 @@ -1,418 +0,0 @@ -Apache CouchDB NEWS -=================== - -For details about backwards incompatible changes, see: - - http://wiki.apache.org/couchdb/Breaking_changes - -Each release section notes when backwards incompatible changes have been made. - -Version 1.2.0 -------------- - -This release contains backwards incompatible changes. - - * Updated bundled erlang_oauth library to the latest version. - * cURL is no longer required to build CouchDB as it is only - required by the command line JS test runner. - * Added a native JSON parser. - * Optional file compression (database and view index files). This feature - is enabled by default. - * Several performance improvements, especially regarding database - writes and view indexing. - * Added a `data_size` property to database and view group - information URIs. - * Added support for automatic compaction. This feature is disabled - by default, but it can be enabled via the .ini configuration. - * A new replicator implementation that offers more performance - and configuration options. - * Added optional field `since_seq` to replication objects/documents. - * Simpler replication cancelation. - * The _active_tasks API now exposes more granular fields for each - task type. - * Futon's `Status` screen (active tasks) now displays two new task - status fields: `Started on` and `Updated on`. - * Added built-in changes feed filter `_view`. - * Fixed old index file descriptor leaks after a view cleanup. - * Performance improvements for the built-in changes feed filters - `_doc_ids` and `_design`. - * Fixes to the `_changes` feed heartbeat option when combined with - a filter. It affected continuous pull replications with a filter. 
- * Fix use of OAuth with VHosts and URL rewriting. - * OAuth secrets can now be stored in the users system database. - * Documents in the _users database can no longer be read by everyone. - * Confidential information in the _replication database can no longer - be read by everyone. - * Password hashes are now calculated by CouchDB instead of the client. - * Allow persistent authentication cookies. - * The requested_path property of query server request objects now has - the path requested by clients before VHosts and rewriting. - * Fixed incorrect reduce query results when using pagination parameters. - * Made icu_driver work with Erlang R15B and later. - * Improvements to the build system and etap test suite. - * Avoid invalidating view indexes when running out of file descriptors. - * Log correct stacktrace in all cases. - * Improvements to log messages for file-related errors. - -Version 1.1.1 -------------- - -* Support SpiderMonkey 1.8.5 -* Add configurable maximum to the number of bytes returned by _log. -* Allow CommonJS modules to be an empty string. -* Bump minimum Erlang version to R13B02. -* Do not run deleted validate_doc_update functions. -* ETags for views include current sequence if include_docs=true. -* Fix bug where duplicates can appear in _changes feed. -* Fix bug where update handlers break after conflict resolution. -* Fix bug with _replicator where include "filter" could crash couch. -* Fix crashes when compacting large views. -* Fix file descriptor leak in _log -* Fix missing revisions in _changes?style=all_docs. -* Improve handling of compaction at max_dbs_open limit. -* JSONP responses now send "text/javascript" for Content-Type. -* Link to ICU 4.2 on Windows. -* Permit forward slashes in path to update functions. -* Reap couchjs processes that hit reduce_overflow error. -* Status code can be specified in update handlers. -* Support provides() in show functions. -* _view_cleanup when ddoc has no views now removes all index files. 
-* max_replication_retry_count now supports "infinity". -* Fix replication crash when source database has a document with empty ID. -* Fix deadlock when assigning couchjs processes to serve requests. -* Fixes to the document multipart PUT API. -* Fixes regarding file descriptor leaks for databases with views. - -Version 1.1.0 -------------- - -All NEWS for 1.0.2 also apply to 1.1.0. - -This release contains backwards incompatible changes. - - * Native SSL support. - * Added support for HTTP range requests for attachments. - * Added built-in filters for `_changes`: `_doc_ids` and `_design`. - * Added configuration option for TCP_NODELAY aka "Nagle". - * Allow wildcards in vhosts definitions. - * More granular ETag support for views. - * More flexible URL rewriter. - * Added OS Process module to manage daemons outside of CouchDB. - * Added HTTP Proxy handler for more scalable externals. - * Added `_replicator` database to manage replications. - * Multiple micro-optimizations when reading data. - * Added CommonJS support to map functions. - * Added `stale=update_after` query option that triggers a view update after - returning a `stale=ok` response. - * More explicit error messages when it's not possible to access a file due - to lack of permissions. - * Added a "change password"-feature to Futon. - -Version 1.0.3 -------------- - - * Fixed compatibility issues with Erlang R14B02. - * Fix bug that allows invalid UTF-8 after valid escapes. - * Enabled replication over IPv6. - * Fixed for crashes in continuous and filtered changes feeds. - * Changes feeds now honor conflicts=true parameter. - * Fixed error when restarting replications in OTP R14B02. - * Fixed error with filter replication with a limit of 1. - * Upgrade ibrowse to version 2.2.0. - * Fixed OAuth signature computation in OTP R14B02. - * Handle passwords with : in them. - * Made compatible with jQuery 1.5.x. - * Added support for inclusive_end wiht reduce views. 
- * Etap tests no longer require use of port 5984. - * Windows builds now require ICU >= 4.4.0 and Erlang >= R14B03. - -Version 1.0.2 -------------- - - * Make test suite work with Safari and Chrome. - * Fix leaking file handles after compacting databases and views. - * Fix databases forgetting their validation function after compaction. - * Fix occasional timeout errors. - * Reduce lengthy stack traces. - * Allow logging of native types. - * Updated ibrowse library to 2.1.2 fixing numerous replication issues. - * Fix authenticated replication of design documents with attachments. - * Fix multipart GET APIs by always sending attachments in compressed - form when the source attachment is compressed on disk. Fixes a possible - edge case when an attachment underwent local-local replication. - * Various fixes to make replicated more resilient for edge-cases. - * Don't trigger a view update when requesting `_design/doc/_info`. - * Fix for circular references in CommonJS requires. - * Fix for frequently edited documents in multi-master deployments being - duplicated in _changes and _all_docs. - * Fix spurious conflict generation during attachment uploads. - * Fix for various bugs in Futon. - -Version 1.0.1 -------------- - - * Fix data corruption bug COUCHDB-844. Please see - http://couchdb.apache.org/notice/1.0.1.html for details. - * Added support for replication via an HTTP/HTTPS proxy. - * Fixed various replicator bugs for interop with older CouchDB versions. - * Show fields saved along with _deleted=true. Allows for auditing of deletes. - * Enable basic-auth popup when required to access the server, to prevent - people from getting locked out. - * User interface element for querying stale (cached) views. - -Version 1.0.0 -------------- - - * More efficient header commits. - * Use O_APPEND to save lseeks. - * Faster implementation of pread_iolist(). Further improves performance on - concurrent reads. - * Added authentication caching - * Faster default view collation. 
- * Added option to include update_seq in view responses. - -Version 0.11.2 --------------- - - * Replicator buxfixes for replicating design documents from secured databases. - * Better error messages on invalid URL requests. - * User documents can now be deleted by admins or the user. - * Avoid potential DOS attack by guarding all creation of atoms. - * Some Futon and JavaScript library bugfixes. - * Fixed CVE-2010-2234: Apache CouchDB Cross Site Request Forgery Attack - -Version 0.11.1 --------------- - - * Mask passwords in active tasks and logging. - * Update mochijson2 to allow output of BigNums not in float form. - * Added support for X-HTTP-METHOD-OVERRIDE. - * Disable jsonp by default. - * Accept gzip encoded standalone attachments. - * Made max_concurrent_connections configurable. - * Added continuous replication option to Futon. - * Added option to replicating test results anonymously to a community - CouchDB instance. - * Allow creation and deletion of config entries in Futon. - * Fixed various UI issues in Futon. - * Fixed compatibility with the Erlang R14 series. - * Fixed warnings on Linux builds. - * Fixed build error when aclocal needs to be called during the build. - * Require ICU 4.3.1. - * Fixed compatibility with Solaris. - * Added authentication redirect URL to log in clients. - * Added authentication caching, to avoid repeated opening and closing of the - users database for each request requiring authentication. - * Made authentication timeout configurable. - * Temporary views are now admin-only resources. - * Don't require a revpos for attachment stubs. - * Make file deletions async to avoid pauses during compaction and db - deletion. - * Fixed for wrong offset when writing headers and converting them to blocks, - only triggered when header is larger than 4k. - * Preserve _revs_limit and instance_start_time after compaction. - * Fixed timeout with large .ini files. 
- * Added tests for couch.js and jquery.couch.js - * Added various API features to jquery.couch.js - * Faster default view collation. - * Upgrade CommonJS modules support to 1.1.1. - * Added option to include update_seq in view responses. - * Fixed erlang filter funs and normalize filter fun API. - * Fixed hang in view shutdown. - * Refactored various internal APIs related to attachment streaming. - * Fixed hanging replication. - * Fixed keepalive issue. - * Allow global rewrites so system defaults are available in vhosts. - * Allow isolation of databases with vhosts. - * Made the test suite overall more reliable. - -Version 0.11.0 --------------- - -This version is a feature-freeze release candidate for Apache CouchDB 1.0. - - * Fixed CVE-2010-0009: Apache CouchDB Timing Attack Vulnerability. - * Added support for building a Windows installer as part of 'make dist'. - * Added optional 'raw' binary collation for faster view builds where Unicode - collation is not important. - * Improved view index build time by reducing ICU collation callouts. - * Added option to implicitly create replication target databases. - * Improved view information objects. - * Bug fix for partial updates during view builds. - * Bug fix for building couch.app's module list. - * Fixed a problem with statistics timers and system sleep. - * Improved the statistics calculations to use an online moving window - algorithm. - * Adds batching of multiple updating requests, to improve throughput with many - writers. - * Removed the now redundant couch_batch_save module. - * Bug fix for premature termination of chunked responses. - * Improved speed and concurrency of config lookups. - * Fixed an edge case for HTTP redirects during replication. - * Fixed HTTP timeout handling for replication. - * Fixed query parameter handling in OAuth'd replication. - * Fixed a bug preventing mixing languages with lists and views. - * Avoid OS process leaks in lists. 
- * Avoid leaking file descriptors on automatic replication restarts. - * Various improvements to the Futon UI. - * Provide Content-MD5 header support for attachments. - * Added default cookie-authentication and users db. - * Added per-db reader access control lists. - * Added per-db security object for configuration data in validation functions. - * Added URL Rewriter handler. - * Added proxy authentication handler. - * Added ability to replicate documents by id. - * Added virtual host handling. - * Uses json2.js for JSON serialization compatiblity with native JSON. - * Fixed CVE-2010-0009: Apache CouchDB Timing Attack Vulnerability. - -Version 0.10.2 --------------- - - * Fixed CVE-2010-0009: Apache CouchDB Timing Attack Vulnerability. - -Version 0.10.1 --------------- - - * Fixed test suite to work with build system. - * Fixed a problem with statistics timers and system sleep. - * Fixed an edge case for HTTP redirects during replication. - * Fixed HTTP timeout handling for replication. - * Fixed query parameter handling in OAuth'd replication. - * Fixed a bug preventing mixing languages with lists and views. - * Avoid OS process leaks in lists. - -Version 0.10.0 --------------- - -This release contains backwards incompatible changes - - * General performance improvements. - * View index generation speedups. - * Even more robust storage format. - * Native Erlang Views for high-performance applications. - * More robust push and pull replication. - * Two-legged OAuth support for applications and replication (three-legged in - preparation). - * Cookie authentication. - * API detail improvements. - * Better RFC 2616 (HTTP 1.1) compliance. - * Added modular configuration file directories. - * Miscellaneous improvements to build, system integration, and portability. - -Version 0.9.2 -------------- - - * Remove branch callbacks to allow building couchjs against newer versions of - Spidermonkey. - * Fix replication with 0.10 servers initiated by an 0.9 server. 
- -Version 0.9.1 -------------- - - * Various bug fixes for the build system, configuration, statistics reporting, - database core, external handlers, Futon interface, HTTP interface, - JavaScript View Server and replicator. - -Version 0.9.0 -------------- - -This release contains backwards incompatible changes. - - * Modular configuration. - * Performance enhancements for document and view access. - * More resilient replication process. - * Replication streams binary attachments. - * Administrator role and basic authentication. - * Document validation functions in design documents. - * Show and list functions for rendering documents and views as developer - controlled content-types. - * External process server module. - * Attachment uploading from Futon. - * Etags for views, lists, shows, document and attachment requests. - * Miscellaneous improvements to build, system integration, and portability. - -Version 0.8.1-incubating ------------------------- - - * Various bug fixes for replication, compaction, the HTTP interface and the - JavaScript View Server. - -Version 0.8.0-incubating ------------------------- - -This release contains backwards incompatible changes. - - * Changed core licensing to the Apache Software License 2.0. - * Refactoring of the core view and storage engines. - * Added support for incremental map/reduce views. - * Changed database file format. - * Many improvements to Futon, the web administration interface. - * Miscellaneous improvements to build, system integration, and portability. - * Swapped out Erlang's inets HTTP server for the Mochiweb HTTP server. - * SpiderMonkey is no longer included with CouchDB, but rather treated as an - external dependency. - * Added bits of awesome. - -Version 0.7.2 -------------- - - * Small changes to build process and `couchdb` command. - * Database server official port is now 5984 TCP/UDP instead of 8888. - -Version 0.7.1 -------------- - - * Small compatibility issue with Firefox 3 fixed. 
- -Version 0.7.0 -------------- - - * Infrastructure rewritten to use the GNU build system for portability. - * The built-in database browsing tool has been rewritten to provide a much - nicer interface for interacting directly with CouchDB from your web browser. - * XML and Fabric have been replaced with JSON and JavaScript for data - transport and View definitions. - -Version 0.6.0 -------------- - - * A replication facility is now available. - * CouchPeek can now create, delete and view documents. - * Building from source is easier and less error prone. - -Version 0.5.0 -------------- - - * A built-in CouchPeek utility. - * A full install kit buildable from a single command. - * A new GNU/Linux version is available. An OS X version is coming soon. - -Version 0.4.0 -------------- - - * Non-existent variables are now nil lists. - * Couch error codes and messages are no longer sent in the HTTP fields, - instead they are exclusively returned in the XML body. This is to avoid HTTP - header parsing problems with oddly formed error messages. - * Returned error messages are now logged at the server at the `info` level to - make general debugging easier. - * Fixed a problem where big table builds caused timeout errors. - * Lots of changes in the low level machinery. Most formulas will continue to - function the same. - * Added full compiler support for extended characters in formula source. - * Support for Perl/Ruby like regular expressions. - * Added `total_rows` and `result_start` attributes to tables. - -Version 0.3.0 -------------- - - * CouchDB now fully supports Unicode and locale specific collation via the ICU - library, both in the Fabric engine and computed tables. - * The `in` operator has been added to Fabric. - * The `startdoc` query string variable specifies the starting document to use - if there are multiple rows with identical startkeys. - * The `skip` query string variable specifies the number of rows to skip before - returning results. 
The `skip` value must be a positive integer. If used with - a `count` variable the skipped rows aren't counted as output. - * Various changes to the output XML format. diff -Nru couchdb-1.2.0/NOTICE couchdb-1.4.0~rc.1/NOTICE --- couchdb-1.2.0/NOTICE 2012-03-28 15:34:12.000000000 -0400 +++ couchdb-1.4.0~rc.1/NOTICE 2013-08-23 10:57:21.000000000 -0400 @@ -1,5 +1,5 @@ Apache CouchDB -Copyright 2009-2012 The Apache Software Foundation +Copyright 2009-2013 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). @@ -18,9 +18,9 @@ Copyright 2004, Scott James Remnant - * jQuery (http://jquery.com/) + * jQuery (http://jquery.org/) - Copyright 2011, John Resig + Copyright 2012 jQuery Foundation and other contributors * jQuery UI (http://jqueryui.com) @@ -28,7 +28,7 @@ * json2.js (http://www.json.org/) - In the public domain + Public domain * MochiWeb (http://code.google.com/p/mochiweb/) @@ -36,11 +36,11 @@ * ibrowse (http://github.com/cmullaparthi/ibrowse/tree/master) - Copyright 2009, Chandrashekhar Mullaparthi + Copyright 2005-2012, Chandrashekhar Mullaparthi - * Erlang OAuth (http://github.com/tim/erlang-oauth/tree/master) + * Erlang OAuth (http://github.com/tim/erlang-oauth) - Copyright 2009, Tim Fletcher + Copyright 2012, the authors and contributors * ETap (http://github.com/ngerakines/etap/) @@ -64,7 +64,7 @@ * snappy (http://code.google.com/p/snappy/) - Copyright 2005 and onwards Google Inc. + Copyright 2005, Google Inc. 
* snappy-erlang-nif (https://github.com/fdmanana/snappy-erlang-nif) @@ -73,3 +73,91 @@ * CoffeeScript (http://coffeescript.org/) Copyright 2011, Jeremy Ashkenas + + * Sphinx (http://sphinx-doc.org/) + + Copyright 2011, the Sphinx team + + * Sizzle (http://sizzlejs.com/) + + Copyright 2010, The Dojo Foundation + + * Underscore.js 1.4.2 (http://underscorejs.org) + + Copyright 2012, Jeremy Ashkenas + + * almond.js (http://github.com/jrburke/almond) + + Copyright 2011, The Dojo Foundation + + * backbone.js (http://backbonejs.org/) + + Copyright 2012, Jeremy Ashkenas, DocumentCloud Inc. + + * Bootstrap (http://twitter.github.com/bootstrap/) + + Copyright 2012, Twitter, Inc. + + * d3.js (http://d3js.org) + + Copyright 2012, Michael Bostock + + * JSHint (http://jshint.com/) + + Copyright 2002, Douglas Crockford, modifications by JSHint Community + + * Lodash (http://lodash.com/) + + Copyright 2012, John-David Dalton + + * nvd3.js (http://nvd3.org/) + + Copyright 2012, Novus Partners, Inc. + + * backbone.layoutmanager.js (https://github.com/tbranyen/backbone.layoutmanager) + + Copyright 2012, Tim Branyen (@tbranyen) + + * prettify.js (http://code.google.com/p/google-code-prettify/) + + Copyright 2011, Mike Samuel et al + + * PouchDB (https://github.com/daleharvey/pouchdb) + + Copyright 2012, Dale Harvey et al + + * require.js (https://github.com/jrburke/requirejs) + + Copyright (c) 2010-2011, The Dojo Foundation + + * codemirror.js (https://github.com/marijnh/CodeMirror) + + Copyright (C) 2013 by Marijn Haverbeke + + * codemirror-javascript.js (https://github.com/marijnh/CodeMirror) + + Copyright (C) 2013 by Marijn Haverbeke + + * jquery.form.js (https://github.com/malsup/form/) + + Copyright 2006-2013 (c) M. 
Alsup + + * couch_dbupdates + + Copyright 2012, Benoît Chesneau + +* mocha.js (https://github.com/visionmedia/mocha) + + Copyright (c) 2011-2013 TJ Holowaychuk + + * chaijs https://github.com/chaijs + + Copyright (c) 2011-2013 Jake Luer jake@alogicalparadox.com + + * sinon-chai + + Copyright © 2012–2013 Domenic Denicola + + * spin.js + + Copyright (c) 2011 Felix Gnass [fgnass at neteye dot de] diff -Nru couchdb-1.2.0/README couchdb-1.4.0~rc.1/README --- couchdb-1.2.0/README 2012-03-28 15:34:12.000000000 -0400 +++ couchdb-1.4.0~rc.1/README 1969-12-31 19:00:00.000000000 -0500 @@ -1,83 +0,0 @@ -Apache CouchDB README -===================== - -Installation ------------- - -For a low-level guide, see: - - INSTALL - -For a high-level guide to Unix-like systems, inc. Mac OS X and Ubuntu, see: - - INSTALL.Unix - -For a high-level guide to Microsoft Windows, see: - - INSTALL.Windows - -Follow the proper instructions to get CouchDB installed on your system. - -If you're having problems, skip to the next section. - -Troubleshooting ----------------- - -For troubleshooting, see: - - http://wiki.apache.org/couchdb/Troubleshooting - -If you're getting a cryptic error message, see: - - http://wiki.apache.org/couchdb/Error_messages - -For general help, see: - - http://couchdb.apache.org/community/lists.html - -The mailing lists provide a wealth of support and knowledge for you to tap into. -Feel free to drop by with your questions or discussion. See the official CouchDB -website for more information about our community resources. - -Running the Testsuite ---------------------- - -Run the testsuite for couch.js and jquery.couch.js by browsing here: - - http://127.0.0.1:5984/_utils/spec/run.html - -It should work in at least Firefox >= 3.6 with Private Browsing mode enabled. - -Read more about JSpec here: - - http://jspec.info/ - -When you change the specs, but your changes have no effect, manually reload -the changed spec file in the browser. 
When the spec that tests erlang views -fails, make sure you have enabled Erlang views as described here: - - http://wiki.apache.org/couchdb/EnableErlangViews - -Cryptographic Software Notice ------------------------------ - -This distribution includes cryptographic software. The country in which you -currently reside may have restrictions on the import, possession, use, and/or -re-export to another country, of encryption software. BEFORE using any -encryption software, please check your country's laws, regulations and policies -concerning the import, possession, or use, and re-export of encryption software, -to see if this is permitted. See for more -information. - -The U.S. Government Department of Commerce, Bureau of Industry and Security -(BIS), has classified this software as Export Commodity Control Number (ECCN) -5D002.C.1, which includes information security software using or performing -cryptographic functions with asymmetric algorithms. The form and manner of this -Apache Software Foundation distribution makes it eligible for export under the -License Exception ENC Technology Software Unrestricted (TSU) exception (see the -BIS Export Administration Regulations, Section 740.13) for both object code and -source code. - -The following provides more details on the included cryptographic software: - -CouchDB includes a HTTP client (ibrowse) with SSL functionality. diff -Nru couchdb-1.2.0/README.rst couchdb-1.4.0~rc.1/README.rst --- couchdb-1.2.0/README.rst 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/README.rst 2013-08-23 10:57:21.000000000 -0400 @@ -0,0 +1,95 @@ +Apache CouchDB README +===================== + +Installation +------------ + +For a low-level guide, see: + + INSTALL + +For a high-level guide to Unix-like systems, inc. Mac OS X and Ubuntu, see: + + INSTALL.Unix + +For a high-level guide to Microsoft Windows, see: + + INSTALL.Windows + +Follow the proper instructions to get CouchDB installed on your system. 
+ +If you're having problems, skip to the next section. + +Documentation +------------- + +We have documentation: + + http://docs.couchdb.org/ + +They include a changelog: + + http://docs.couchdb.org/en/latest/changelog.html + +For troubleshooting, see: + + http://wiki.apache.org/couchdb/Troubleshooting + +If you're getting a cryptic error message, see: + + http://wiki.apache.org/couchdb/Error_messages + +For general help, see: + + http://couchdb.apache.org/#mailing-list + +We also have an IRC channel: + + http://webchat.freenode.net/?channels=couchdb + +The mailing lists provide a wealth of support and knowledge for you to tap into. +Feel free to drop by with your questions or discussion. See the official CouchDB +website for more information about our community resources. + +Running the Testsuite +--------------------- + +Run the testsuite for couch.js and jquery.couch.js by browsing here: + + http://127.0.0.1:5984/_utils/spec/run.html + +It should work in at least Firefox >= 3.6 with Private Browsing mode enabled. + +Read more about JSpec here: + + https://github.com/liblime/jspec + +When you change the specs, but your changes have no effect, manually reload +the changed spec file in the browser. When the spec that tests Erlang views +fails, make sure you have enabled Erlang views as described here: + + http://wiki.apache.org/couchdb/EnableErlangViews + +Cryptographic Software Notice +----------------------------- + +This distribution includes cryptographic software. The country in which you +currently reside may have restrictions on the import, possession, use, and/or +re-export to another country, of encryption software. BEFORE using any +encryption software, please check your country's laws, regulations and policies +concerning the import, possession, or use, and re-export of encryption software, +to see if this is permitted. See for more +information. + +The U.S. 
Government Department of Commerce, Bureau of Industry and Security +(BIS), has classified this software as Export Commodity Control Number (ECCN) +5D002.C.1, which includes information security software using or performing +cryptographic functions with asymmetric algorithms. The form and manner of this +Apache Software Foundation distribution makes it eligible for export under the +License Exception ENC Technology Software Unrestricted (TSU) exception (see the +BIS Export Administration Regulations, Section 740.13) for both object code and +source code. + +The following provides more details on the included cryptographic software: + +CouchDB includes a HTTP client (ibrowse) with SSL functionality. diff -Nru couchdb-1.2.0/share/doc/build/html/api/authn.html couchdb-1.4.0~rc.1/share/doc/build/html/api/authn.html --- couchdb-1.2.0/share/doc/build/html/api/authn.html 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/api/authn.html 2013-08-23 10:58:36.000000000 -0400 @@ -0,0 +1,222 @@ + + + + + + + + + + 9.7. Authentication Methods — Apache CouchDB 1.4 Manual + + + + + + + + + + + + + + + + + +
+
+
+
+ +
+

9.7. Authentication Methods

+

The CouchDB Authentication methods provide an interface for obtaining +session and authorization data.

+

A list of the available methods and URL paths are provided below:

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
MethodPathDescription
GET/_oauth/access_tokenTBC
GET/_oauth/authorizeTBC
POST/_oauth/authorizeTBC
GET/_oauth/request_tokenTBC
GET/_sessionReturns cookie based login user +information
POST/_sessionDo cookie based user login
DELETE/_sessionLogout cookie based user
+
+ + +
+
+
+
+
+ + + + + +

Previous topic

+

9.6. Configuration Methods

+

Next topic

+

10. JSON Structure Reference

+ + +

Utilities

+ + + + +

More Help

+ + +
+
+
+
+ + + + \ No newline at end of file diff -Nru couchdb-1.2.0/share/doc/build/html/api/configuration.html couchdb-1.4.0~rc.1/share/doc/build/html/api/configuration.html --- couchdb-1.2.0/share/doc/build/html/api/configuration.html 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/api/configuration.html 2013-08-23 10:58:37.000000000 -0400 @@ -0,0 +1,467 @@ + + + + + + + + + + 9.6. Configuration Methods — Apache CouchDB 1.4 Manual + + + + + + + + + + + + + + + + + +
+
+
+
+ +
+

9.6. Configuration Methods

+

The CouchDB API Server Configuration Methods provide an interface to +query and update the various configuration values within a running +CouchDB instance.

+

A list of the available methods and URL paths are provided below:

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
MethodPathDescription
GET/_configObtain a list of the entire server +configuration
GET/_config/sectionGet all the configuration values for the +specified section
GET/_config/section/keyGet a specific section/configuration value
PUT/_config/section/keySet the specified configuration value
DELETE/_config/section/keyDelete the current setting
+
+

9.6.1. GET /_config

+
    +
  • Method: GET /_config
  • +
  • Request: None
  • +
  • Response: Returns a structure configuration name and value pairs, +organized by section
  • +
  • Admin Privileges Required: yes
  • +
  • Return Codes:
      +
    • 200: +Request completed successfully.
    • +
    +
  • +
+

Returns the entire CouchDB server configuration as a JSON structure. The +structure is organized by different configuration sections, with +individual values.

+

For example, to get the configuration for a server:

+
GET http://couchdb:5984/_config
+Accept: application/json
+
+

The response is the JSON structure:

+
{
+   "query_server_config" : {
+      "reduce_limit" : "true"
+   },
+   "couchdb" : {
+      "os_process_timeout" : "5000",
+      "max_attachment_chunk_size" : "4294967296",
+      "max_document_size" : "4294967296",
+      "uri_file" : "/var/lib/couchdb/couch.uri",
+      "max_dbs_open" : "100",
+      "view_index_dir" : "/var/lib/couchdb",
+      "util_driver_dir" : "/usr/lib64/couchdb/erlang/lib/couch-1.0.1/priv/lib",
+      "database_dir" : "/var/lib/couchdb",
+      "delayed_commits" : "true"
+   },
+   "attachments" : {
+      "compressible_types" : "text/*, application/javascript, application/json,  application/xml",
+      "compression_level" : "8"
+   },
+   "uuids" : {
+      "algorithm" : "utc_random"
+   },
+   "daemons" : {
+      "view_manager" : "{couch_view, start_link, []}",
+      "auth_cache" : "{couch_auth_cache, start_link, []}",
+      "uuids" : "{couch_uuids, start, []}",
+      "stats_aggregator" : "{couch_stats_aggregator, start, []}",
+      "query_servers" : "{couch_query_servers, start_link, []}",
+      "httpd" : "{couch_httpd, start_link, []}",
+      "stats_collector" : "{couch_stats_collector, start, []}",
+      "db_update_notifier" : "{couch_db_update_notifier_sup, start_link, []}",
+      "external_manager" : "{couch_external_manager, start_link, []}"
+   },
+   "stats" : {
+      "samples" : "[0, 60, 300, 900]",
+      "rate" : "1000"
+   },
+   "httpd" : {
+      "vhost_global_handlers" : "_utils, _uuids, _session, _oauth, _users",
+      "secure_rewrites" : "true",
+      "authentication_handlers" : "{couch_httpd_oauth, oauth_authentication_handler},
+                                   {couch_httpd_auth, cookie_authentication_handler},
+                                   {couch_httpd_auth, default_authentication_handler}",
+      "port" : "5984",
+      "default_handler" : "{couch_httpd_db, handle_request}",
+      "allow_jsonp" : "false",
+      "bind_address" : "192.168.0.2",
+      "max_connections" : "2048"
+   },
+   "query_servers" : {
+      "javascript" : "/usr/bin/couchjs /usr/share/couchdb/server/main.js"
+   },
+   "couch_httpd_auth" : {
+      "authentication_db" : "_users",
+      "require_valid_user" : "false",
+      "authentication_redirect" : "/_utils/session.html",
+      "timeout" : "600",
+      "auth_cache_size" : "50"
+   },
+   "httpd_db_handlers" : {
+      "_design" : "{couch_httpd_db, handle_design_req}",
+      "_compact" : "{couch_httpd_db, handle_compact_req}",
+      "_view_cleanup" : "{couch_httpd_db, handle_view_cleanup_req}",
+      "_temp_view" : "{couch_httpd_view, handle_temp_view_req}",
+      "_changes" : "{couch_httpd_db, handle_changes_req}"
+   },
+   "replicator" : {
+      "max_http_sessions" : "10",
+      "max_http_pipeline_size" : "10"
+   },
+   "log" : {
+      "include_sasl" : "true",
+      "level" : "info",
+      "file" : "/var/log/couchdb/couch.log"
+   },
+   "httpd_design_handlers" : {
+      "_update" : "{couch_httpd_show, handle_doc_update_req}",
+      "_show" : "{couch_httpd_show, handle_doc_show_req}",
+      "_info" : "{couch_httpd_db,   handle_design_info_req}",
+      "_list" : "{couch_httpd_show, handle_view_list_req}",
+      "_view" : "{couch_httpd_view, handle_view_req}",
+      "_rewrite" : "{couch_httpd_rewrite, handle_rewrite_req}"
+   },
+   "httpd_global_handlers" : {
+      "_replicate" : "{couch_httpd_misc_handlers, handle_replicate_req}",
+      "/" : "{couch_httpd_misc_handlers, handle_welcome_req, <<\"Welcome\">>}",
+      "_config" : "{couch_httpd_misc_handlers, handle_config_req}",
+      "_utils" : "{couch_httpd_misc_handlers, handle_utils_dir_req, \"/usr/share/couchdb/www\"}",
+      "_active_tasks" : "{couch_httpd_misc_handlers, handle_task_status_req}",
+      "_session" : "{couch_httpd_auth, handle_session_req}",
+      "_log" : "{couch_httpd_misc_handlers, handle_log_req}",
+      "favicon.ico" : "{couch_httpd_misc_handlers, handle_favicon_req, \"/usr/share/couchdb/www\"}",
+      "_all_dbs" : "{couch_httpd_misc_handlers, handle_all_dbs_req}",
+      "_oauth" : "{couch_httpd_oauth, handle_oauth_req}",
+      "_restart" : "{couch_httpd_misc_handlers, handle_restart_req}",
+      "_uuids" : "{couch_httpd_misc_handlers, handle_uuids_req}",
+      "_stats" : "{couch_httpd_stats_handlers, handle_stats_req}"
+   }
+}
+
+
+
+
+

9.6.2. GET /_config/section

+
    +
  • Method: GET /_config/section
  • +
  • Request: None
  • +
  • Response: All the configuration values within a specified section
  • +
  • Admin Privileges Required: yes
  • +
  • Return Codes:
      +
    • 200: +Request completed successfully.
    • +
    +
  • +
+

Gets the configuration structure for a single section. For example, to +retrieve the CouchDB configuration section values:

+
GET http://couchdb:5984/_config/couchdb
+Accept: application/json
+
+

The returned JSON contains just the configuration values for this +section:

+
{
+   "os_process_timeout" : "5000",
+   "max_attachment_chunk_size" : "4294967296",
+   "max_document_size" : "4294967296",
+   "uri_file" : "/var/lib/couchdb/couch.uri",
+   "max_dbs_open" : "100",
+   "view_index_dir" : "/var/lib/couchdb",
+   "util_driver_dir" : "/usr/lib64/couchdb/erlang/lib/couch-1.0.1/priv/lib",
+   "database_dir" : "/var/lib/couchdb",
+   "delayed_commits" : "true"
+}
+
+
+
+
+

9.6.3. GET /_config/section/key

+
    +
  • Method: GET /_config/section/key
  • +
  • Request: None
  • +
  • Response: Value of the specified key/section
  • +
  • Admin Privileges Required: yes
  • +
  • Return Codes:
      +
    • 200: +Request completed successfully.
    • +
    +
  • +
+

Gets a single configuration value from within a specific configuration +section. For example, to obtain the current log level:

+
GET http://couchdb:5984/_config/log/level
+Accept: application/json
+
+

Returns the string of the log level:

+
"info"
+
+
+
+

Note

+

The returned value will be the JSON of the value, which may be a +string or numeric value, or an array or object. Some client +environments may not parse simple strings or numeric values as valid JSON.

+
+
+
+

9.6.4. PUT /_config/section/key

+
    +
  • Method: PUT /_config/section/key
  • +
  • Request: Value structure
  • +
  • Response: Previous value
  • +
  • Admin Privileges Required: yes
  • +
  • Return Codes:
      +
    • 200: +Configuration option updated successfully
    • +
    • 500: +Error setting configuration
    • +
    +
  • +
+

Updates a configuration value. The new value should be supplied in the +request body in the corresponding JSON format. For example, if you are +setting a string value, you must supply a valid JSON string.

+

For example, to set the function used to generate UUIDs by the +GET /_uuids API call to use the utc_random generator:

+
PUT http://couchdb:5984/_config/uuids/algorithm
+Content-Type: application/json
+
+"utc_random"
+
+

The return value will be empty, with the response code indicating the +success or failure of the configuration setting.

+
+
+

9.6.5. DELETE /_config/section/key

+
    +
  • Method: DELETE /_config/section/key
  • +
  • Request: None
  • +
  • Response: Previous value
  • +
  • Admin Privileges Required: yes
  • +
  • Return Codes:
      +
    • 409: +Supplied revision is incorrect or missing
    • +
    +
  • +
+

Deletes a configuration value. The returned JSON will be the value of +the configuration parameter before it was deleted. For example, to +delete the UUID parameter:

+
DELETE http://couchdb:5984/_config/uuids/algorithm
+Content-Type: application/json
+
+

The returned value is the last configured UUID function:

+
"random"
+
+
+
+
+ + +
+
+
+
+
+ + + + + +

Table Of Contents

+ + +

Previous topic

+

9.5. Miscellaneous Methods

+

Next topic

+

9.7. Authentication Methods

+ + +

Utilities

+ + + + +

More Help

+ + +
+
+
+
+ + + + \ No newline at end of file diff -Nru couchdb-1.2.0/share/doc/build/html/api/database.html couchdb-1.4.0~rc.1/share/doc/build/html/api/database.html --- couchdb-1.2.0/share/doc/build/html/api/database.html 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/api/database.html 2013-08-23 10:58:41.000000000 -0400 @@ -0,0 +1,1646 @@ + + + + + + + + + + 9.1. Database Methods — Apache CouchDB 1.4 Manual + + + + + + + + + + + + + + + + + +
+
+
+
+ +
+

9.1. Database Methods

+

The Database methods provide an interface to an entire database within +CouchDB. These are database, rather than document, level requests.

+

A list of the available methods and URL paths are provided below:

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
MethodPathDescription
GET/dbReturns database information
PUT/dbCreate a new database
DELETE/dbDelete an existing database
GET/db/_all_docsReturns a built-in view of all documents +in this database
POST/db/_all_docsReturns certain rows from the built-in +view of all documents
POST/db/_bulk_docsInsert multiple documents in to the +database in a single request
GET/db/_changesReturns changes for the given database
POST/db/_compactStarts a compaction for the database
POST/db/_compact/design-docStarts a compaction for all the views in +the selected design document
POST/db/_ensure_full_commitMakes sure all uncommitted changes are +written and synchronized to the disk
POST/db/_missing_revsGiven a list of document revisions, +returns the document revisions that do not +exist in the database
POST/db/_purgePurge some historical documents entirely +from database history
POST/db/_revs_diffGiven a list of document revisions, +returns differences between the given +revisions and ones that are in the +database
GET/db/_revs_limitGets the limit of historical revisions to +store for a single document in the +database
PUT/db/_revs_limitSets the limit of historical revisions to +store for a single document in the +database
GET/db/_securityReturns the special security object for +the database
PUT/db/_securitySets the special security object for the +database
POST/db/_temp_viewExecute a given view function for all +documents and return the result
POST/db/_view_cleanupRemoves view files that are not used by +any design document
+

For all the database methods, the database name within the URL path +should be the database name that you wish to perform the operation on. +For example, to obtain the meta information for the database +recipes, you would use the HTTP request:

+
GET /recipes
+
+

For clarity, the form below is used in the URL paths:

+
GET /db
+
+

Where db is the name of any database.

+
+

9.1.1. GET /db

+
    +
  • Method: GET /db
  • +
  • Request: None
  • +
  • Response: Information about the database in JSON format
  • +
  • Admin Privileges Required: no
  • +
  • Return Codes:
      +
    • 404: +The requested content could not be found. The returned content will include +further information, as a JSON object, if available.
    • +
    +
  • +
+

Gets information about the specified database. For example, to retrieve +the information for the database recipes:

+
GET http://couchdb:5984/recipes
+Accept: application/json
+
+

The JSON response contains meta information about the database. A sample +of the JSON returned for an empty database is provided below:

+
{
+   "compact_running" : false,
+   "committed_update_seq" : 375048,
+   "disk_format_version" : 5,
+   "disk_size" : 33153123,
+   "doc_count" : 18386,
+   "doc_del_count" : 0,
+   "db_name" : "recipes",
+   "instance_start_time" : "1290700340925570",
+   "purge_seq" : 10,
+   "update_seq" : 375048
+}
+
+
+

The elements of the returned structure are shown in the table below:

+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
FieldDescription
committed_update_seqThe number of committed updates.
compact_runningSet to true if the database compaction +routine is operating on this database.
db_nameThe name of the database.
disk_format_versionThe version of the physical format used +for the data when it is stored on disk.
disk_sizeSize in bytes of the data as stored on the +disk. Views indexes are not included in +the calculation.
doc_countA count of the documents in the specified +database.
doc_del_countNumber of deleted documents
instance_start_timeTimestamp of when the database was +opened, expressed in microseconds since +the epoch.
purge_seqThe number of purge operations on the +database.
update_seqThe current number of updates to the +database.
+
+
+

9.1.2. PUT /db

+
    +
  • Method: PUT /db
  • +
  • Request: None
  • +
  • Response: JSON success statement
  • +
  • Admin Privileges Required: no
  • +
  • Return Codes:
      +
    • 400: +Invalid database name
    • +
    • 412: +Database already exists
    • +
    +
  • +
+

Creates a new database. The database name must be composed of one or +more of the following characters:

+
    +
  • Lowercase characters (a-z)
  • +
  • Name must begin with a lowercase letter
  • +
  • Digits (0-9)
  • +
  • Any of the characters _, $, (, ), +, -, and +/.
  • +
+

Trying to create a database that does not meet these requirements will +return an error quoting these restrictions.

+

To create the database recipes:

+
PUT http://couchdb:5984/recipes
+Content-Type: application/json
+
+

The returned content contains the JSON status:

+
{
+   "ok" : true
+}
+
+
+

Anything else should be treated as an error, and the problem should be taken +from the HTTP response code.

+
+
+

9.1.3. DELETE /db

+
    +
  • Method: DELETE /db
  • +
  • Request: None
  • +
  • Response: JSON success statement
  • +
  • Admin Privileges Required: no
  • +
  • Return Codes:
      +
    • 200: +Database has been deleted
    • +
    • 404: +The requested content could not be found. The returned content will include +further information, as a JSON object, if available.
    • +
    +
  • +
+

Deletes the specified database, and all the documents and attachments +contained within it.

+

To delete the database recipes you would send the request:

+
DELETE http://couchdb:5984/recipes
+Content-Type: application/json
+
+

If successful, the returned JSON will indicate success

+
{
+   "ok" : true
+}
+
+
+
+
+

9.1.4. GET /db/_changes

+
    +
  • Method: GET /db/_changes
  • +
  • Request: None
  • +
  • Response: JSON success statement
  • +
  • Admin Privileges Required: no
  • +
  • Query Arguments:
      +
    • Argument: doc_ids
        +
      • Description: Specify the list of documents IDs to be filtered
      • +
      • Optional: yes
      • +
      • Type: json
      • +
      • Default: none
      • +
      +
    • +
    • Argument: feed
        +
      • Description: Type of feed
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      • Default: normal
      • +
      • Supported Values:
          +
        • continuous: Continuous (non-polling) mode
        • +
        • longpoll: Long polling mode
        • +
        • normal: Normal mode
        • +
        +
      • +
      +
    • +
    • Argument: filter
        +
      • Description: Filter function from a design document to get updates
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      • Default: none
      • +
      • Supported Values:
      • +
      +
    • +
    • Argument: heartbeat
        +
      • Description: Period after which an empty line is sent during longpoll +or continuous
      • +
      • Optional: yes
      • +
      • Type: numeric
      • +
      • Default: 60000
      • +
      • Quantity: milliseconds
      • +
      +
    • +
    • Argument: include_docs
        +
      • Description: Include the document with the result
      • +
      • Optional: yes
      • +
      • Type: boolean
      • +
      • Default: false
      • +
      +
    • +
    • Argument: limit
        +
      • Description: Maximum number of rows to return
      • +
      • Optional: yes
      • +
      • Type: numeric
      • +
      • Default: none
      • +
      +
    • +
    • Argument: since
        +
      • Description: Start the results from changes immediately after the +specified sequence number
      • +
      • Optional: yes
      • +
      • Type: numeric
      • +
      • Default: 0
      • +
      +
    • +
    +
  • +
+

Obtains a list of the changes made to the database. This can be used to +monitor for update and modifications to the database for post processing +or synchronization. There are three different types of supported changes +feeds, poll, longpoll, and continuous. All requests are poll requests by +default. You can select any feed type explicitly using the feed +query argument.

+
    +
  • Poll

    +

    With polling you can request the changes that have occurred since a +specific sequence number. This returns the JSON structure containing +the changed document information. When you perform a poll change +request, only the changes since the specific sequence number are +returned. For example, the query

    +
    GET http://couchdb:5984/recipes/_changes
    +Content-Type: application/json
    +
    +

    Will get all of the changes in the database. You can request a +starting point using the since query argument and specifying the +sequence number. You will need to record the latest sequence number +in your client and then use this when making another request as the +new value to the since parameter.

    +
  • +
  • Longpoll

    +

    With long polling the request to the server will remain open until a +change is made on the database, when the changes will be reported, +and then the connection will close. The long poll is useful when you +want to monitor for changes for a specific purpose without wanting to +monitor continuously for changes.

    +

    Because the wait for a change can be significant you can set a +timeout before the connection is automatically closed (the +timeout argument). You can also set a heartbeat interval (using +the heartbeat query argument), which sends a newline to keep the +connection open.

    +
  • +
  • Continuous

    +

    Continuous sends all new changes back to the client immediately, +without closing the connection. In continuous mode the format of the +changes is slightly different to accommodate the continuous nature +while ensuring that the JSON output is still valid for each change +notification.

    +

    As with the longpoll feed type you can set both the timeout and +heartbeat intervals to ensure that the connection is kept open for +new changes and updates.

    +
  • +
+

The return structure for normal and longpoll modes is a JSON +array of changes objects, and the last update sequence number. The +structure is described in the following table.

+ ++++ + + + + + + + + + + + + + + + + + + + + + + +
FieldDescription
last_seqLast change sequence number.
results [array]Changes made to a database
changes [array]List of changes, field-by-field, for this +document
idDocument ID
seqUpdate sequence number
+

In continuous mode the server sends a CRLF +(carriage-return, linefeed) delimited line for each change. Each line +contains the JSON object.

+

You can also request the full contents of each document change (instead +of just the change notification) by using the include_docs +parameter.

+
+

9.1.4.1. Filtering

+

You can filter the contents of the changes feed in a number of ways. The +most basic way is to specify one or more document IDs to the query. This +causes the returned structure value to only contain changes for the +specified IDs. Note that the value of this query argument should be a +JSON formatted array.

+

You can also filter the _changes feed by defining a filter function +within a design document. The specification for the filter is the same +as for replication filters. You specify the name of the filter function +to the filter parameter, specifying the design document name and +filter name. For example:

+
GET /db/_changes?filter=design_doc/filtername
+
+

The _changes feed can be used to watch changes to specific document +ID’s or the list of _design documents in a database. If the +filter parameter is set to _doc_ids a list of doc IDs can be +passed in the doc_ids parameter as a JSON array. For more +information, see Changes Feed.

+
+
+
+

9.1.5. POST /db/_compact

+
    +
  • Method: POST /db/_compact
  • +
  • Request: None
  • +
  • Response: JSON success statement
  • +
  • Admin Privileges Required: yes
  • +
  • Return Codes:
      +
    • 202: +Compaction request has been accepted
    • +
    • 404: +The requested content could not be found. The returned content will include +further information, as a JSON object, if available.
    • +
    +
  • +
+

Request compaction of the specified database. Compaction compresses the +disk database file by performing the following operations:

+
    +
  • Writes a new version of the database file, removing any unused +sections from the new version during write. Because a new file is +temporarily created for this purpose, you will need twice the current +storage space of the specified database in order for the compaction +routine to complete.
  • +
  • Removes old revisions of documents from the database, up to the +per-database limit specified by the _revs_limit database +parameter. See GET /db.
  • +
+

Compaction can only be requested on an individual database; you cannot +compact all the databases for a CouchDB instance. The compaction process +runs as a background process.

+

You can determine if the compaction process is operating on a database +by obtaining the database meta information, the compact_running +value of the returned database structure will be set to true. See +GET /db.

+

You can also obtain a list of running processes to determine whether +compaction is currently running. See GET /_active_tasks.

+
+
+

9.1.6. POST /db/_compact/design-doc

+
    +
  • Method: POST /db/_compact/design-doc
  • +
  • Request: None
  • +
  • Response: JSON success statement
  • +
  • Admin Privileges Required: yes
  • +
  • Return Codes:
      +
    • 202: +Compaction request has been accepted
    • +
    • 404: +The requested content could not be found. The returned content will include +further information, as a JSON object, if available.
    • +
    +
  • +
+

Compacts the view indexes associated with the specified design document. +You can use this in place of the full database compaction if you know a +specific set of view indexes have been affected by a recent database +change.

+

For example, to compact the views associated with the recipes design +document:

+
POST http://couchdb:5984/recipes/_compact/recipes
+Content-Type: application/json
+
+

CouchDB will immediately return with a status indicating that the +compaction request has been received (HTTP status code 202):

+
{
+   "ok" : true
+}
+
+
+
+
+

9.1.7. POST /db/_view_cleanup

+
    +
  • Method: POST /db/_view_cleanup
  • +
  • Request: None
  • +
  • Response: JSON success statement
  • +
  • Admin Privileges Required: yes
  • +
+

Cleans up the cached view output on disk for a given view. For example:

+
POST http://couchdb:5984/recipes/_view_cleanup
+Content-Type: application/json
+
+

If the request is successful, a basic status message is returned:

+
{
+   "ok" : true
+}
+
+
+
+
+

9.1.8. POST /db/_ensure_full_commit

+
    +
  • Method: POST /db/_ensure_full_commit
  • +
  • Request: None
  • +
  • Response: JSON success statement
  • +
  • Admin Privileges Required: no
  • +
  • Return Codes:
      +
    • 202: +Commit completed successfully
    • +
    • 404: +The requested content could not be found. The returned content will include +further information, as a JSON object, if available.
    • +
    +
  • +
+

Commits any recent changes to the specified database to disk. You should +call this if you want to ensure that recent changes have been written. +For example, to commit all the changes to disk for the database +recipes you would use:

+
POST http://couchdb:5984/recipes/_ensure_full_commit
+Content-Type: application/json
+
+

This returns a status message, containing the success message and the +timestamp for when the CouchDB instance was started:

+
{
+  "ok" : true,
+  "instance_start_time" : "1288186189373361"
+}
+
+
+
+
+

9.1.9. POST /db/_bulk_docs

+
    +
  • Method: POST /db/_bulk_docs
  • +
  • Request: JSON of the docs and updates to be applied
  • +
  • Response: JSON success statement
  • +
  • Admin Privileges Required: no
  • +
  • Return Codes:
      +
    • 201: +Document(s) have been created or updated
    • +
    +
  • +
+

The bulk document API allows you to create and update multiple documents +at the same time within a single request. The basic operation is similar +to creating or updating a single document, except that you batch the +document structure and information. When creating new documents the +document ID is optional. For updating existing documents, you must +provide the document ID, revision information, and new document values.

+

For both inserts and updates the basic structure of the JSON is the +same:

+ ++++ + + + + + + + + + + + + + + + + + + + + + + +
FieldDescription
all_or_nothing (optional)Sets the database commit mode to use +all-or-nothing semantics
docs [array]Bulk Documents Document
_id (optional)Document ID
_rev (optional)Document revision
_deleted (optional)Whether the document should be +deleted
+
+

9.1.9.1. Inserting Documents in Bulk

+

To insert documents in bulk into a database you need to supply a JSON +structure with the array of documents that you want to add to the +database. Using this method you can either include a document ID, or +allow the document ID to be automatically generated.

+

For example, the following inserts three new documents, two with the +supplied document IDs, and one which will have a document ID generated:

+
{
+   "docs" : [
+      {
+         "_id" : "FishStew",
+         "servings" : 4,
+         "subtitle" : "Delicious with fresh bread",
+         "title" : "Fish Stew"
+      },
+      {
+         "_id" : "LambStew",
+         "servings" : 6,
+         "subtitle" : "Delicious with scone topping",
+         "title" : "Lamb Stew"
+      },
+      {
+         "servings" : 8,
+         "subtitle" : "Delicious with suet dumplings",
+         "title" : "Beef Stew"
+      }
+   ]
+}
+
+
+

The return type from a bulk insertion will be 201, with the content of +the returned structure indicating specific success or otherwise messages +on a per-document basis.

+

The return structure from the example above contains a list of the +documents created, here with the combination and their revision IDs:

+
POST http://couchdb:5984/recipes/_bulk_docs
+Content-Type: application/json
+
+[
+   {
+      "id" : "FishStew",
+      "rev" : "1-9c65296036141e575d32ba9c034dd3ee"
+   },
+   {
+      "id" : "LambStew",
+      "rev" : "1-34c318924a8f327223eed702ddfdc66d"
+   },
+   {
+      "id" : "7f7638c86173eb440b8890839ff35433",
+      "rev" : "1-857c7cbeb6c8dd1dd34a0c73e8da3c44"
+   }
+]
+
+

The content and structure of the returned JSON will depend on the transaction +semantics being used for the bulk update; see Bulk Documents Transaction Semantics for more +information. Conflicts and validation errors when updating documents in +bulk must be handled separately; see Bulk Document Validation and Conflict Errors.

+
+
+

9.1.9.2. Updating Documents in Bulk

+

The bulk document update procedure is similar to the insertion +procedure, except that you must specify the document ID and current +revision for every document in the bulk update JSON string.

+

For example, you could send the following request:

+
POST http://couchdb:5984/recipes/_bulk_docs
+Content-Type: application/json
+
+{
+   "docs" : [
+      {
+         "_id" : "FishStew",
+         "_rev" : "1-9c65296036141e575d32ba9c034dd3ee",
+         "servings" : 4,
+         "subtitle" : "Delicious with freshly baked bread",
+         "title" : "Fish Stew"
+      },
+      {
+         "_id" : "LambStew",
+         "_rev" : "1-34c318924a8f327223eed702ddfdc66d",
+         "servings" : 6,
+         "subtitle" : "Serve with a wholemeal scone topping",
+         "title" : "Lamb Stew"
+      },
+      {
+         "_id" : "7f7638c86173eb440b8890839ff35433",
+         "_rev" : "1-857c7cbeb6c8dd1dd34a0c73e8da3c44",
+         "servings" : 8,
+         "subtitle" : "Hand-made dumplings make a great accompaniment",
+         "title" : "Beef Stew"
+      }
+   ]
+}
+
+

The return structure is the JSON of the updated documents, with the new +revision and ID information:

+
[
+   {
+      "id" : "FishStew",
+      "rev" : "2-e7af4c4e9981d960ecf78605d79b06d1"
+   },
+   {
+      "id" : "LambStew",
+      "rev" : "2-0786321986194c92dd3b57dfbfc741ce"
+   },
+   {
+      "id" : "7f7638c86173eb440b8890839ff35433",
+      "rev" : "2-bdd3bf3563bee516b96885a66c743f8e"
+   }
+]
+
+
+

You can optionally delete documents during a bulk update by adding the +_deleted field with a value of true to each document ID/revision +combination within the submitted JSON structure.

+

The return type from a bulk insertion will be 201, with the content of +the returned structure indicating specific success or otherwise messages +on a per-document basis.

+

The content and structure of the returned JSON will depend on the transaction +semantics being used for the bulk update; see Bulk Documents Transaction Semantics for more +information. Conflicts and validation errors when updating documents in +bulk must be handled separately; see Bulk Document Validation and Conflict Errors.

+
+
+

9.1.9.3. Bulk Documents Transaction Semantics

+

CouchDB supports two different modes for updating (or inserting) +documents using the bulk document system. Each mode affects both +the state of the documents in the event of system failure, and the level +of conflict checking performed on each document. The two modes are:

+
    +
  • non-atomic

    +

    The default mode is non-atomic, that is, CouchDB will only guarantee +that some of the documents will be saved when you send the request. +The response will contain the list of documents successfully inserted +or updated during the process. In the event of a crash, some of the +documents may have been successfully saved, and some will have been +lost.

    +

    In this mode, the response structure will indicate whether the +document was updated by supplying the new _rev parameter +indicating a new document revision was created. If the update failed, +then you will get an error of type conflict. For example:

    +
    [
    +   {
    +      "id" : "FishStew",
    +      "error" : "conflict",
    +      "reason" : "Document update conflict."
    +   },
    +   {
    +      "id" : "LambStew",
    +      "error" : "conflict",
    +      "reason" : "Document update conflict."
    +   },
    +   {
    +      "id" : "7f7638c86173eb440b8890839ff35433",
    +      "error" : "conflict",
    +      "reason" : "Document update conflict."
    +   }
    +]
    +
    +
    +

    In this case no new revision has been created and you will need to +submit the document update, with the correct revision tag, to update +the document.

    +
  • +
  • all-or-nothing

    +

    In all-or-nothing mode, either all documents are written to the +database, or no documents are written to the database, in the event +of a system failure during commit.

    +

    In addition, the per-document conflict checking is not performed. +Instead a new revision of the document is created, even if the new +revision is in conflict with the current revision in the database. +The returned structure contains the list of documents with new +revisions:

    +
    [
    +   {
    +      "id" : "FishStew",
    +      "rev" : "2-e7af4c4e9981d960ecf78605d79b06d1"
    +   },
    +   {
    +      "id" : "LambStew",
    +      "rev" : "2-0786321986194c92dd3b57dfbfc741ce"
    +   },
    +   {
    +      "id" : "7f7638c86173eb440b8890839ff35433",
    +      "rev" : "2-bdd3bf3563bee516b96885a66c743f8e"
    +   }
    +]
    +
    +
    +

    When updating documents using this mode the revision of a document +included in views will be arbitrary. You can check the conflict +status for a document by using the conflicts=true query argument +when accessing the view. Conflicts should be handled individually to +ensure the consistency of your database.

    +

    To use this mode, you must include the all_or_nothing field (set +to true) within the main body of the JSON of the request.

    +
  • +
+

The effects of different database operations on the different modes are +summarized below:

+
    +
  • Transaction Mode: Non-atomic
      +
    • Transaction: Insert
        +
      • Cause: Requested document ID already exists
      • +
      • Resolution: Resubmit with different document ID, or update the +existing document
      • +
      +
    • +
    • Transaction: Update
        +
      • Cause: Revision missing or incorrect
      • +
      • Resolution: Resubmit with correct revision
      • +
      +
    • +
    +
  • +
  • Transaction Mode: All-or-nothing
      +
    • Transaction: Insert / Update
        +
      • Cause: Additional revision inserted
      • +
      • Resolution: Resolve conflicted revisions
      • +
      +
    • +
    +
  • +
+

Replication of documents is independent of the type of insert or update. +The documents and revisions created during a bulk insert or update are +replicated in the same way as any other document. This can mean that if +you make use of the all-or-nothing mode the exact list of documents, +revisions (and their conflict state) may or may not be replicated to +other databases correctly.

+
+
+

9.1.9.4. Bulk Document Validation and Conflict Errors

+

The JSON returned by the _bulk_docs operation consists of an array +of JSON structures, one for each document in the original submission. +The returned JSON structure should be examined to ensure that all of the +documents submitted in the original request were successfully added to +the database.

+

The exact structure of the returned information is:

+ ++++ + + + + + + + + + + + + + + + + + + + +
FieldDescription
docs [array]Bulk Documents Document
idDocument ID
errorError type
reasonError string with extended reason
+

When a document (or document revision) is not correctly committed to the +database because of an error, you should check the error field to +determine error type and course of action. Errors will be one of the +following type:

+
    +
  • conflict

    +

    The document as submitted is in conflict. If you used the default +bulk transaction mode then the new revision will not have been +created and you will need to re-submit the document to the database. +If you used all-or-nothing mode then you will need to manually +resolve the conflicted revisions of the document.

    +

    Conflict resolution of documents added using the bulk docs interface +is identical to the resolution procedures used when resolving +conflict errors during replication.

    +
  • +
  • forbidden

    +

    Entries with this error type indicate that the validation routine +applied to the document during submission has returned an error.

    +

    For example, if your validation routine includes the following:

    +
    throw({forbidden: 'invalid recipe ingredient'});
    +
    +
    +

    The error returned will be:

    +
    {
    +   "id" : "7f7638c86173eb440b8890839ff35433",
    +   "error" : "forbidden",
    +   "reason" : "invalid recipe ingredient"
    +}
    +
    +
    +
  • +
+
+
+
+

9.1.10. POST /db/_temp_view

+
    +
  • Method: POST /db/_temp_view
  • +
  • Request: JSON with the temporary view definition
  • +
  • Response: Temporary view result set
  • +
  • Admin Privileges Required: yes
  • +
+

Creates (and executes) a temporary view based on the view function +supplied in the JSON request. For example:

+
POST http://couchdb:5984/recipes/_temp_view
+Content-Type: application/json
+
+{
+   "map" : "function(doc) { if (doc.value > 9995) { emit(null, doc.value); } }"
+}
+
+

The resulting JSON response is the result from the execution of the +temporary view:

+
{
+   "total_rows" : 3,
+   "rows" : [
+      {
+         "value" : 9998.41913029012,
+         "id" : "05361cc6aa42033878acc1bacb1f39c2",
+         "key" : null
+      },
+      {
+         "value" : 9998.94149934853,
+         "id" : "1f443f471e5929dd7b252417625ed170",
+         "key" : null
+      },
+      {
+         "value" : 9998.01511339154,
+         "id" : "1f443f471e5929dd7b252417629c102b",
+         "key" : null
+      }
+   ],
+   "offset" : 0
+}
+
+
+

The arguments available to standard view requests also apply to +temporary views, but the execution of the view may take some time as it +relies on being executed at the time of the request. In addition to the +time taken, they are also computationally very expensive to produce. You +should use a defined view if you want to achieve the best performance.

+
+
+

9.1.11. POST /db/_purge

+
    +
  • Method: POST /db/_purge
  • +
  • Request: JSON of the document IDs/revisions to be purged
  • +
  • Response: JSON structure with purged documents and purge sequence
  • +
  • Admin Privileges Required: no
  • +
+

Occasionally something ends up in your database that should never have +been written to it, like a password or private key. Purge can help you here.

+

A database purge permanently removes the references to deleted documents +from the database. Deleting a document within CouchDB does not actually +remove the document from the database, instead, the document is marked as +deleted (and a new revision is created). This is to ensure that +deleted documents are replicated to other databases as having been +deleted. This also means that you can check the status of a document and +identify that the document has been deleted.

+
+

Note

+

Don’t use purge as a regular operation, it is designed to be used +in exceptional cases.

+
+

The purge operation removes the references to the deleted documents from +the database. The purging of old documents is not replicated to other +databases. If you are replicating between databases and have deleted a +large number of documents you should run purge on each database.

+
+

Note

+

Purging documents does not remove the space used by them on disk. To +reclaim disk space, you should run a database compact (see +POST /db/_compact), and compact views (see POST /db/_compact/design-doc).

+
+

To perform a purge operation you must send a request including the JSON +of the document IDs that you want to purge. For example:

+
POST http://couchdb:5984/recipes/_purge
+Content-Type: application/json
+
+{
+  "FishStew" : [
+    "17-b3eb5ac6fbaef4428d712e66483dcb79"
+    ]
+}
+
+

The format of the request must include the document ID and one or more +revisions that must be purged.

+

The response will contain the purge sequence number, and a list of the +document IDs and revisions successfully purged.

+
{
+   "purged" : {
+      "FishStew" : [
+         "17-b3eb5ac6fbaef4428d712e66483dcb79"
+      ]
+   },
+   "purge_seq" : 11
+}
+
+
+
+

9.1.11.1. Updating Indexes

+

The number of purges on a database is tracked using a purge sequence. +This is used by the view indexer to optimize the updating of views that +contain the purged documents.

+

When the indexer identifies that the purge sequence on a database has +changed, it compares the purge sequence of the database with that stored +in the view index. If the difference between the stored sequence and the +database sequence is only 1, then the indexer uses a cached list of +the most recently purged documents, and then removes these documents +from the index individually. This prevents completely rebuilding the +index from scratch.

+

If the difference between the stored sequence number and current +database sequence is greater than 1, then the view index is entirely +rebuilt. This is an expensive operation as every document in the +database must be examined.

+
+
+
+

9.1.12. GET /db/_all_docs

+
    +
  • Method: GET /db/_all_docs
  • +
  • Request: None
  • +
  • Response: JSON object containing document information, ordered by the +document ID
  • +
  • Admin Privileges Required: no
  • +
  • Query Arguments:
      +
    • Argument: descending
        +
      • Description: Return the documents in descending by key order
      • +
      • Optional: yes
      • +
      • Type: boolean
      • +
      • Default: false
      • +
      +
    • +
    • Argument: endkey
        +
      • Description: Stop returning records when the specified key is reached
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      +
    • +
    • Argument: endkey_docid
        +
      • Description: Stop returning records when the specified document ID is +reached
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      +
    • +
    • Argument: group
        +
      • Description: Group the results using the reduce function to a group +or single row
      • +
      • Optional: yes
      • +
      • Type: boolean
      • +
      • Default: false
      • +
      +
    • +
    • Argument: group_level
        +
      • Description: Specify the group level to be used
      • +
      • Optional: yes
      • +
      • Type: numeric
      • +
      +
    • +
    • Argument: include_docs
        +
      • Description: Include the full content of the documents in the return
      • +
      • Optional: yes
      • +
      • Type: boolean
      • +
      • Default: false
      • +
      +
    • +
    • Argument: inclusive_end
        +
      • Description: Specifies whether the specified end key should be +included in the result
      • +
      • Optional: yes
      • +
      • Type: boolean
      • +
      • Default: true
      • +
      +
    • +
    • Argument: key
        +
      • Description: Return only documents that match the specified key
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      +
    • +
    • Argument: limit
        +
      • Description: Limit the number of the returned documents to the +specified number
      • +
      • Optional: yes
      • +
      • Type: numeric
      • +
      +
    • +
    • Argument: reduce
        +
      • Description: Use the reduction function
      • +
      • Optional: yes
      • +
      • Type: boolean
      • +
      • Default: true
      • +
      +
    • +
    • Argument: skip
        +
      • Description: Skip this number of records before starting to return +the results
      • +
      • Optional: yes
      • +
      • Type: numeric
      • +
      • Default: 0
      • +
      +
    • +
    • Argument: stale
        +
      • Description: Allow the results from a stale view to be used
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      • Default:
      • +
      • Supported Values:
          +
        • ok: Allow stale views
        • +
        +
      • +
      +
    • +
    • Argument: startkey
        +
      • Description: Return records starting with the specified key
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      +
    • +
    • Argument: startkey_docid
        +
      • Description: Return records starting with the specified document ID
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      +
    • +
    +
  • +
+

Returns a JSON structure of all of the documents in a given database. +The information is returned as a JSON structure containing meta +information about the return structure, and the list of documents and basic +contents, consisting of the ID, revision and key. The key is generated from +the document ID.

+ ++++ + + + + + + + + + + + + + + + + + + + +
FieldDescription
offsetOffset where the document list started
rows [array]Array of document object
total_rowsNumber of documents in the database/view
update_seqCurrent update sequence for the database
+

By default the information returned contains only the document ID and +revision. For example, the request:

+
GET http://couchdb:5984/recipes/_all_docs
+Accept: application/json
+
+

Returns the following structure:

+
{
+   "total_rows" : 18386,
+   "rows" : [
+      {
+         "value" : {
+            "rev" : "1-bc0d5aed1e339b1cc1f29578f3220a45"
+         },
+         "id" : "Aberffrawcake",
+         "key" : "Aberffrawcake"
+      },
+      {
+         "value" : {
+            "rev" : "3-68a20c89a5e70357c20148f8e82ca331"
+         },
+         "id" : "Adukiandorangecasserole-microwave",
+         "key" : "Adukiandorangecasserole-microwave"
+      },
+      {
+         "value" : {
+            "rev" : "3-9b2851ed9b6f655cc4eb087808406c60"
+         },
+         "id" : "Aioli-garlicmayonnaise",
+         "key" : "Aioli-garlicmayonnaise"
+      },
+      ...
+         ],
+   "offset" : 0
+}
+
+
+

The information is returned in the form of a temporary view of all the +database documents, with the returned key consisting of the ID of the +document. The remainder of the interface is therefore identical to the +View query arguments and their behavior.

+
+
+

9.1.13. POST /db/_all_docs

+
    +
  • Method: POST /db/_all_docs
  • +
  • Request: JSON of the document IDs you want included
  • +
  • Response: JSON of the returned view
  • +
  • Admin Privileges Required: no
  • +
+

The POST to _all_docs allows you to specify multiple keys to be +selected from the database. This enables you to request multiple +documents in a single request, in place of multiple +GET /db/doc requests.

+

The request body should contain a list of the keys to be returned as an +array to a keys object. For example:

+
POST http://couchdb:5984/recipes/_all_docs
+User-Agent: MyApp/0.1 libwww-perl/5.837
+
+{
+   "keys" : [
+      "Zingylemontart",
+      "Yogurtraita"
+   ]
+}
+
+

The return JSON is the all documents structure, but with only the +selected keys in the output:

+
{
+   "total_rows" : 2666,
+   "rows" : [
+      {
+         "value" : {
+            "rev" : "1-a3544d296de19e6f5b932ea77d886942"
+         },
+         "id" : "Zingylemontart",
+         "key" : "Zingylemontart"
+      },
+      {
+         "value" : {
+            "rev" : "1-91635098bfe7d40197a1b98d7ee085fc"
+         },
+         "id" : "Yogurtraita",
+         "key" : "Yogurtraita"
+      }
+   ],
+   "offset" : 0
+}
+
+
+
+
+

9.1.14. POST /db/_missing_revs

+
    +
  • Method: POST /db/_missing_revs
  • +
  • Request: JSON list of document revisions
  • +
  • Response: JSON of missing revisions
  • +
  • Admin Privileges Required: no
  • +
+
+
+

9.1.15. POST /db/_revs_diff

+
    +
  • Method: POST /db/_revs_diff
  • +
  • Request: JSON list of document revisions
  • +
  • Response: JSON list of differences from supplied document/revision list
  • +
  • Admin Privileges Required: no
  • +
+
+
+

9.1.16. GET /db/_security

+
    +
  • Method: GET /db/_security
  • +
  • Request: None
  • +
  • Response: JSON of the security object
  • +
  • Admin Privileges Required: no
  • +
+

Gets the current security object from the specified database. The +security object consists of two compulsory elements, admins and +readers, which are used to specify the list of users and/or roles +that have admin and reader rights to the database respectively. Any +additional fields in the security object are optional. The entire +security object is made available to validation and other internal +functions so that the database can control and limit functionality.

+

To get the existing security object you would send the following +request:

+
{
+   "admins" : {
+      "roles" : [],
+      "names" : [
+         "mc",
+         "slp"
+      ]
+   },
+   "readers" : {
+      "roles" : [],
+      "names" : [
+         "tim",
+         "brian"
+      ]
+   }
+}
+
+
+

Security object structure is:

+
    +
  • admins: Roles/Users with admin privileges
      +
    • roles [array]: List of roles with parent privilege
    • +
    • users [array]: List of users with parent privilege
    • +
    +
  • +
  • readers: Roles/Users with reader privileges
      +
    • roles [array]: List of roles with parent privilege
    • +
    • users [array]: List of users with parent privilege
    • +
    +
  • +
+
+

Note

+

If the security object for a database has never been set, then the +value returned will be empty.

+
+
+
+

9.1.17. PUT /db/_security

+
    +
  • Method: PUT /db/_security
  • +
  • Request: JSON specifying the admin and user security for the database
  • +
  • Response: JSON status message
  • +
  • Admin Privileges Required: no
  • +
+

Sets the security object for the given database. For example, to set the +security object for the recipes database:

+
PUT http://couchdb:5984/recipes/_security
+Content-Type: application/json
+
+{
+   "admins" : {
+      "roles" : [],
+      "names" : [
+         "mc",
+         "slp"
+      ]
+   },
+   "readers" : {
+      "roles" : [],
+      "names" : [
+         "tim",
+         "brian"
+      ]
+   }
+}
+
+
+

If the setting was successful, a JSON status object will be returned:

+
{
+   "ok" : true
+}
+
+
+
+
+

9.1.18. GET /db/_revs_limit

+
    +
  • Method: GET /db/_revs_limit
  • +
  • Request: None
  • +
  • Response: The current revision limit setting
  • +
  • Admin Privileges Required: no
  • +
+

Gets the current revs_limit (revision limit) setting.

+

For example to get the current limit:

+
GET http://couchdb:5984/recipes/_revs_limit
+Content-Type: application/json
+
+

The returned information is the current setting as a numerical scalar:

+
1000
+
+
+
+
+

9.1.19. PUT /db/_revs_limit

+
    +
  • Method: PUT /db/_revs_limit
  • +
  • Request: A scalar integer of the revision limit setting
  • +
  • Response: Confirmation of setting of the revision limit
  • +
  • Admin Privileges Required: no
  • +
+

Sets the maximum number of document revisions that will be tracked by +CouchDB, even after compaction has occurred. You can set the revision +limit on a database by using PUT with a scalar integer of the limit +that you want to set as the request body.

+

For example to set the revs limit to 100 for the recipes database:

+
PUT http://couchdb:5984/recipes/_revs_limit
+Content-Type: application/json
+
+100
+
+

If the setting was successful, a JSON status object will be returned:

+
{
+   "ok" : true
+}
+
+
+
+
+ + +
+
+
+ +
+
+ + + + \ No newline at end of file diff -Nru couchdb-1.2.0/share/doc/build/html/api/design.html couchdb-1.4.0~rc.1/share/doc/build/html/api/design.html --- couchdb-1.2.0/share/doc/build/html/api/design.html 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/api/design.html 2013-08-23 10:58:44.000000000 -0400 @@ -0,0 +1,1384 @@ + + + + + + + + + + 9.4. Design Document Methods — Apache CouchDB 1.4 Manual + + + + + + + + + + + + + + + + + +
+
+
+
+ +
+

9.4. Design Document Methods

+

In CouchDB, design documents provide the main interface for building a +CouchDB application. The design document defines the views used to +extract information from CouchDB through one or more views. Design +documents are created within your CouchDB instance in the same way as +you create database documents, but the content and definition of the +documents is different. Design Documents are named using an ID defined +with the design document URL path, and this URL can then be used to +access the database contents.

+

Views and lists operate together to provide automated (and formatted) +output from your database.

+

A list of the available methods and URL paths are provided below:

+

Design Document API Calls

+
+

9.4.1. GET /db/_design/design-doc

+
    +
  • Method: GET /db/_design/design-doc

    +
  • +
  • Request: None

    +
  • +
  • Response: JSON of the existing design document

    +
  • +
  • Admin Privileges Required: no

    +
  • +
  • Query Arguments:

    +
      +
    • Argument: rev

      +
        +
      • Description: Specify the revision to return
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      +
    • +
    • Argument: revs

      +
        +
      • Description: Return a list of the revisions for the document

        +
      • +
      • Optional: yes

        +
      • +
      • Type: boolean

        +
      • +
      • Supported Values:

        +
        +
          +
        • true: Includes the revisions
        • +
        +
        +
      • +
      +
    • +
    • Argument: revs_info

      +
        +
      • Description: Return a list of detailed revision information for the +document
      • +
      • Optional: yes
      • +
      • Type: boolean
      • +
      • Supported Values:
          +
        • true: Includes the revisions
        • +
        +
      • +
      +
    • +
    +
  • +
+

Returns the specified design document, design-doc from the specified +db. For example, to retrieve the design document recipes you +would send the following request:

+
GET http://couchdb:5984/recipes/_design/recipes
+Content-Type: application/json
+
+

The returned string will be the JSON of the design document:

+
{
+   "_id" : "_design/recipes",
+   "_rev" : "5-39f56a392b86bbee57e2138921346406"
+   "language" : "javascript",
+   "views" : {
+      "by_recipe" : {
+         "map" : "function(doc) { if (doc.title != null) emit(doc.title, doc) }"
+      },
+   },
+}
+
+
+

A list of the revisions can be obtained by using the revs query +argument, or an extended list of revisions using the revs_info query +argument. This operates in the same way as for other documents. For +further examples, see GET /db/doc.

+
+
+

9.4.2. PUT /db/_design/design-doc

+
    +
  • Method: PUT /db/_design/design-doc
  • +
  • Request: JSON of the design document
  • +
  • Response: JSON status
  • +
  • Admin Privileges Required: no
  • +
+

Upload the specified design document, design-doc, to the specified +database. The design document should follow the definition of a design +document, as summarised in the following table.

+
    +
  • _id: Design Document ID
  • +
  • _rev: Design Document Revision
  • +
  • views: View
      +
    • viewname: View Definition
        +
      • map: Map Function for View
      • +
      • reduce (optional): Reduce Function for View
      • +
      +
    • +
    +
  • +
+

For more information on writing views, see GET /db/_design/design-doc/_view/view-name.

+
+
+

9.4.3. DELETE /db/_design/design-doc

+
    +
  • Method: DELETE /db/_design/design-doc
  • +
  • Request: None
  • +
  • Response: JSON of deleted design document
  • +
  • Admin Privileges Required: no
  • +
  • Query Arguments:
      +
    • Argument: rev
        +
      • Description: Current revision of the document for validation
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      +
    • +
    +
  • +
  • HTTP Headers
      +
    • Header: If-Match
        +
      • Description: Current revision of the document for validation
      • +
      • Optional: yes
      • +
      +
    • +
    +
  • +
  • Return Codes:
      +
    • 409: +Supplied revision is incorrect or missing
    • +
    +
  • +
+

Delete an existing design document. Deleting a design document also +deletes all of the associated view indexes, and recovers the +corresponding space on disk for the indexes in question.

+

To delete, you must specify the current revision of the design document +using the rev query argument.

+

For example:

+
DELETE http://couchdb:5984/recipes/_design/recipes?rev=2-ac58d589b37d01c00f45a4418c5a15a8
+Content-Type: application/json
+
+

The response contains the deleted document ID and revision:

+
{
+   "id" : "recipe/_design/recipes"
+   "ok" : true,
+   "rev" : "3-7a05370bff53186cb5d403f861aca154",
+}
+
+
+
+
+

9.4.4. COPY /db/_design/design-doc

+
    +
  • Method: COPY /db/_design/design-doc
  • +
  • Request: None
  • +
  • Response: JSON of the new document and revision
  • +
  • Admin Privileges Required: no
  • +
  • Query Arguments:
      +
    • Argument: rev
        +
      • Description: Revision to copy from
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      +
    • +
    +
  • +
  • HTTP Headers
      +
    • Header: Destination
        +
      • Description: Destination document (and optional revision)
      • +
      • Optional: no
      • +
      +
    • +
    +
  • +
+

The COPY command (non-standard HTTP) copies an existing design +document to a new or existing document.

+

The source design document is specified on the request line, with the +Destination HTTP Header of the request specifying the target +document.

+
+

9.4.4.1. Copying a Design Document

+

To copy the latest version of a design document to a new document you +specify the base document and target document:

+
COPY http://couchdb:5984/recipes/_design/recipes
+Content-Type: application/json
+Destination: /recipes/_design/recipelist
+
+

The above request copies the design document recipes to the new +design document recipelist. The response is the ID and revision of +the new document.

+
{
+   "id" : "recipes/_design/recipelist"
+   "rev" : "1-9c65296036141e575d32ba9c034dd3ee",
+}
+
+
+
+

Note

+

Copying a design document does not automatically reconstruct the view +indexes. These will be recreated, as with other views, the first +time the new view is accessed.

+
+
+
+

9.4.4.2. Copying from a Specific Revision

+

To copy from a specific version, use the rev argument to the query +string:

+
COPY http://couchdb:5984/recipes/_design/recipes?rev=1-e23b9e942c19e9fb10ff1fde2e50e0f5
+Content-Type: application/json
+Destination: recipes/_design/recipelist
+
+

The new design document will be created using the specified revision of +the source document.

+
+
+

9.4.4.3. Copying to an Existing Design Document

+

To copy to an existing document, you must specify the current revision +string for the target document, using the rev parameter to the +Destination HTTP Header string. For example:

+
COPY http://couchdb:5984/recipes/_design/recipes
+Content-Type: application/json
+Destination: recipes/_design/recipelist?rev=1-9c65296036141e575d32ba9c034dd3ee
+
+

The return value will be the new revision of the copied document:

+
{
+   "id" : "recipes/_design/recipes"
+   "rev" : "2-55b6a1b251902a2c249b667dab1c6692",
+}
+
+
+
+
+
+

9.4.5. GET /db/_design/design-doc/attachment

+
    +
  • Method: GET /db/_design/design-doc/attachment
  • +
  • Request: None
  • +
  • Response: Returns the attachment data
  • +
  • Admin Privileges Required: no
  • +
+

Returns the file attachment attachment associated with the design +document /_design/design-doc. The raw data of the associated +attachment is returned (just as if you were accessing a static file). The +returned HTTP Content-type will be the same as the content type set +when the document attachment was submitted into the database.

+
+
+

9.4.6. PUT /db/_design/design-doc/attachment

+
    +
  • Method: PUT /db/_design/design-doc/attachment
  • +
  • Request: Raw document data
  • +
  • Response: JSON document status
  • +
  • Admin Privileges Required: no
  • +
  • Query Arguments:
      +
    • Argument: rev
        +
      • Description: Current document revision
      • +
      • Optional: no
      • +
      • Type: string
      • +
      +
    • +
    +
  • +
  • HTTP Headers
      +
    • Header: Content-Length
        +
      • Description: Length (bytes) of the attachment being uploaded
      • +
      • Optional: no
      • +
      +
    • +
    • Header: Content-Type
        +
      • Description: MIME type for the uploaded attachment
      • +
      • Optional: no
      • +
      +
    • +
    • Header: If-Match
        +
      • Description: Current revision of the document for validation
      • +
      • Optional: yes
      • +
      +
    • +
    +
  • +
+

Upload the supplied content as an attachment to the specified design +document (/_design/design-doc). The attachment name provided +must be a URL encoded string. You must also supply either the rev +query argument or the If-Match HTTP header for validation, and the +HTTP headers (to set the attachment content type). The content type is +used when the attachment is requested as the corresponding content-type +in the returned document header.

+

For example, you could upload a simple text document using the following +request:

+
PUT http://couchdb:5984/recipes/_design/recipes/view.css?rev=7-f7114d4d81124b223283f3e89eee043e
+Content-Length: 39
+Content-Type: text/plain
+
+div.recipetitle {
+font-weight: bold;
+}
+
+

Or by using the If-Match HTTP header:

+
PUT http://couchdb:5984/recipes/FishStew/basic
+If-Match: 7-f7114d4d81124b223283f3e89eee043e
+Content-Length: 39
+Content-Type: text/plain
+
+div.recipetitle {
+font-weight: bold;
+}
+
+

The returned JSON contains the new document information:

+
{
+   "id" : "_design/recipes"
+   "ok" : true,
+   "rev" : "8-cb2b7d94eeac76782a02396ba70dfbf5",
+}
+
+
+
+

Note

+

Uploading an attachment updates the corresponding document revision. +Revisions are tracked for the parent document, not individual attachments.

+
+
+
+

9.4.7. DELETE /db/_design/design-doc/attachment

+
    +
  • Method: DELETE /db/_design/design-doc/attachment
  • +
  • Request: None
  • +
  • Response: JSON status
  • +
  • Admin Privileges Required: no
  • +
  • Query Arguments:
      +
    • Argument: rev
        +
      • Description: Current document revision
      • +
      • Optional: no
      • +
      • Type: string
      • +
      +
    • +
    +
  • +
  • HTTP Headers
      +
    • Header: If-Match
        +
      • Description: Current revision of the document for validation
      • +
      • Optional: yes
      • +
      +
    • +
    +
  • +
  • Return Codes:
      +
    • 200: +Attachment deleted successfully
    • +
    • 409: +Supplied revision is incorrect or missing
    • +
    +
  • +
+

Deletes the attachment attachment to the specified +_design/design-doc. You must supply the rev argument with the +current revision to delete the attachment.

+

For example to delete the attachment view.css from the design +document recipes:

+
DELETE http://couchdb:5984/recipes/_design/recipes/view.css?rev=9-3db559f13a845c7751d407404cdeaa4a
+
+

The returned JSON contains the updated revision information for the +parent document:

+
{
+   "id" : "_design/recipes"
+   "ok" : true,
+   "rev" : "10-f3b15bb408961f8dcc3d86c7d3b54c4c",
+}
+
+
+
+
+

9.4.8. GET /db/_design/design-doc/_info

+
    +
  • Method: GET /db/_design/design-doc/_info
  • +
  • Request: None
  • +
  • Response: JSON of the design document information
  • +
  • Admin Privileges Required: no
  • +
+

Obtains information about a given design document, including the index, +index size and current status of the design document and associated +index information.

+

For example, to get the information for the recipes design document:

+
GET http://couchdb:5984/recipes/_design/recipes/_info
+Content-Type: application/json
+
+

This returns the following JSON structure:

+
{
+   "name" : "recipes"
+   "view_index" : {
+      "compact_running" : false,
+      "updater_running" : false,
+      "language" : "javascript",
+      "purge_seq" : 10,
+      "waiting_commit" : false,
+      "waiting_clients" : 0,
+      "signature" : "fc65594ee76087a3b8c726caf5b40687",
+      "update_seq" : 375031,
+      "disk_size" : 16491
+   },
+}
+
+
+

The individual fields in the returned JSON structure are detailed below:

+
    +
  • name: Name/ID of Design Document
  • +
  • view_index: View Index
      +
    • compact_running: Indicates whether a compaction routine is currently +running on the view
    • +
    • disk_size: Size in bytes of the view as stored on disk
    • +
    • language: Language for the defined views
    • +
    • purge_seq: The purge sequence that has been processed
    • +
    • signature: MD5 signature of the views for the design document
    • +
    • update_seq: The update sequence of the corresponding database that +has been indexed
    • +
    • updater_running: Indicates if the view is currently being updated
    • +
    • waiting_clients: Number of clients waiting on views from this design +document
    • +
    • waiting_commit: Indicates if there are outstanding commits to the +underlying database that need to be processed
    • +
    +
  • +
+
+
+

9.4.9. GET /db/_design/design-doc/_view/view-name

+
    +
  • Method: GET /db/_design/design-doc/_view/view-name
  • +
  • Request: None
  • +
  • Response: JSON of the documents returned by the view
  • +
  • Admin Privileges Required: no
  • +
  • Query Arguments:
      +
    • Argument: descending
        +
      • Description: Return the documents in descending by key order
      • +
      • Optional: yes
      • +
      • Type: boolean
      • +
      • Default: false
      • +
      +
    • +
    • Argument: endkey
        +
      • Description: Stop returning records when the specified key is reached
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      +
    • +
    • Argument: endkey_docid
        +
      • Description: Stop returning records when the specified document +ID is reached
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      +
    • +
    • Argument: group
        +
      • Description: Group the results using the reduce function to a +group or single row
      • +
      • Optional: yes
      • +
      • Type: boolean
      • +
      • Default: false
      • +
      +
    • +
    • Argument: group_level
        +
      • Description: Specify the group level to be used
      • +
      • Optional: yes
      • +
      • Type: numeric
      • +
      +
    • +
    • Argument: include_docs
        +
      • Description: Include the full content of the documents in the return
      • +
      • Optional: yes
      • +
      • Type: boolean
      • +
      • Default: false
      • +
      +
    • +
    • Argument: inclusive_end
        +
      • Description: Specifies whether the specified end key should be +included in the result
      • +
      • Optional: yes
      • +
      • Type: boolean
      • +
      • Default: true
      • +
      +
    • +
    • Argument: key
        +
      • Description: Return only documents that match the specified key
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      +
    • +
    • Argument: limit
        +
      • Description: Limit the number of the returned documents to the +specified number
      • +
      • Optional: yes
      • +
      • Type: numeric
      • +
      +
    • +
    • Argument: reduce
        +
      • Description: Use the reduction function
      • +
      • Optional: yes
      • +
      • Type: boolean
      • +
      • Default: true
      • +
      +
    • +
    • Argument: skip
        +
      • Description: Skip this number of records before starting to return +the results
      • +
      • Optional: yes
      • +
      • Type: numeric
      • +
      • Default: 0
      • +
      +
    • +
    • Argument: stale
        +
      • Description: Allow the results from a stale view to be used
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      • Default:
      • +
      • Supported Values
          +
        • ok: Allow stale views
        • +
        +
      • +
      +
    • +
    • Argument: startkey
        +
      • Description: Return records starting with the specified key
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      +
    • +
    • Argument: startkey_docid
        +
      • Description: Return records starting with the specified document ID
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      +
    • +
    • Argument: update_seq
        +
      • Description: Include the update sequence in the generated results
      • +
      • Optional: yes
      • +
      • Type: boolean
      • +
      • Default: false
      • +
      +
    • +
    +
  • +
+

Executes the specified view-name from the specified design-doc +design document.

+
+

9.4.9.1. Querying Views and Indexes

+

The definition of a view within a design document also creates an index +based on the key information defined within each view. The production +and use of the index significantly increases the speed of access and +searching or selecting documents from the view.

+

However, the index is not updated when new documents are added or +modified in the database. Instead, the index is generated or updated, +either when the view is first accessed, or when the view is accessed +after a document has been updated. In each case, the index is updated +before the view query is executed against the database.

+

View indexes are updated incrementally in the following situations:

+
    +
  • A new document has been added to the database.
  • +
  • A document has been deleted from the database.
  • +
  • A document in the database has been updated.
  • +
+

View indexes are rebuilt entirely when the view definition changes. To +achieve this, a ‘fingerprint’ of the view definition is created when the +design document is updated. If the fingerprint changes, then the view +indexes are entirely rebuilt. This ensures that changes to the view +definitions are reflected in the view indexes.

+
+

Note

+

View index rebuilds occur when one view from the same view group +(i.e. all the views defined within a single design document) has +been determined as needing a rebuild. For example, if you have a +design document with three different views, and you update the database, +all three view indexes within the design document will be updated.

+
+

Because the view is updated when it has been queried, it can result in a +delay in returned information when the view is accessed, especially if +there are a large number of documents in the database and the view index +does not exist. There are a number of ways to mitigate, but not +completely eliminate, these issues. These include:

+
    +
  • Create the view definition (and associated design documents) on your +database before allowing insertion or updates to the documents. If +this is allowed while the view is being accessed, the index can be +updated incrementally.
  • +
  • Manually force a view request from the database. You can do this +either before users are allowed to use the view, or you can access +the view manually after documents are added or updated.
  • +
  • Use the /db/_changes method to monitor for changes to the +database and then access the view to force the corresponding view +index to be updated. See GET /db/_changes for more information.
  • +
  • Use a monitor with the update_notification section of the CouchDB +configuration file to monitor for changes to your database, and +trigger a view query to force the view to be updated. For more +information, see Update Notifications.
  • +
+

None of these can completely eliminate the need for the indexes to be +rebuilt or updated when the view is accessed, but they may lessen the +effect of index updates on the end-user experience.

+

Another alternative is to allow users to access a ‘stale’ version of the +view index, rather than forcing the index to be updated and displaying +the updated results. Using a stale view may not return the latest +information, but will return the results of the view query using an +existing version of the index.

+

For example, to access the existing stale view by_recipe in the +recipes design document:

+
http://couchdb:5984/recipes/_design/recipes/_view/by_recipe?stale=ok
+
+
+

Accessing a stale view:

+
    +
  • Does not trigger a rebuild of the view indexes, even if there have +been changes since the last access.
  • +
  • Returns the current version of the view index, if a current version +exists.
  • +
  • Returns an empty result set if the given view index does not exist.
  • +
+

As an alternative, you can use the update_after value for the stale +parameter. This causes the view to be returned as a stale view, but for +the update process to be triggered after the view information has been +returned to the client.

+

In addition to using stale views, you can also make use of the +update_seq query argument. Using this query argument generates the +view information including the update sequence of the database from +which the view was generated. The returned value can be compared to +the current update sequence exposed in the database information +(returned by GET /db).

+
+
+

9.4.9.2. Sorting Returned Rows

+

Each element within the returned array is sorted using native UTF-8 +sorting according to the contents of the key portion of the emitted +content. The basic order of output is as follows:

+
    +
  • null
  • +
  • false
  • +
  • true
  • +
  • Numbers
  • +
  • Text (case sensitive, lowercase first)
  • +
  • Arrays (according to the values of each element, in order)
  • +
  • Objects (according to the values of keys, in key order)
  • +
+

You can reverse the order of the returned view information by using the +descending query value set to true. For example, retrieving the list +of recipes using the by_title (limited to 5 records) view:

+
{
+   "offset" : 0,
+   "rows" : [
+      {
+         "id" : "3-tiersalmonspinachandavocadoterrine",
+         "key" : "3-tier salmon, spinach and avocado terrine",
+         "value" : [
+            null,
+            "3-tier salmon, spinach and avocado terrine"
+         ]
+      },
+      {
+         "id" : "Aberffrawcake",
+         "key" : "Aberffraw cake",
+         "value" : [
+            null,
+            "Aberffraw cake"
+         ]
+      },
+      {
+         "id" : "Adukiandorangecasserole-microwave",
+         "key" : "Aduki and orange casserole - microwave",
+         "value" : [
+            null,
+            "Aduki and orange casserole - microwave"
+         ]
+      },
+      {
+         "id" : "Aioli-garlicmayonnaise",
+         "key" : "Aioli - garlic mayonnaise",
+         "value" : [
+            null,
+            "Aioli - garlic mayonnaise"
+         ]
+      },
+      {
+         "id" : "Alabamapeanutchicken",
+         "key" : "Alabama peanut chicken",
+         "value" : [
+            null,
+            "Alabama peanut chicken"
+         ]
+      }
+   ],
+   "total_rows" : 2667
+}
+
+
+

Requesting the same in descending order will reverse the entire view +content. For example the request

+
GET http://couchdb:5984/recipes/_design/recipes/_view/by_title?limit=5&descending=true
+Accept: application/json
+Content-Type: application/json
+
+

Returns the last 5 records from the view:

+
{
+   "offset" : 0,
+   "rows" : [
+      {
+         "id" : "Zucchiniinagrodolcesweet-sourcourgettes",
+         "key" : "Zucchini in agrodolce (sweet-sour courgettes)",
+         "value" : [
+            null,
+            "Zucchini in agrodolce (sweet-sour courgettes)"
+         ]
+      },
+      {
+         "id" : "Zingylemontart",
+         "key" : "Zingy lemon tart",
+         "value" : [
+            null,
+            "Zingy lemon tart"
+         ]
+      },
+      {
+         "id" : "Zestyseafoodavocado",
+         "key" : "Zesty seafood avocado",
+         "value" : [
+            null,
+            "Zesty seafood avocado"
+         ]
+      },
+      {
+         "id" : "Zabaglione",
+         "key" : "Zabaglione",
+         "value" : [
+            null,
+            "Zabaglione"
+         ]
+      },
+      {
+         "id" : "Yogurtraita",
+         "key" : "Yogurt raita",
+         "value" : [
+            null,
+            "Yogurt raita"
+         ]
+      }
+   ],
+   "total_rows" : 2667
+}
+
+
+

The sorting direction is applied before the filtering applied using the +startkey and endkey query arguments. For example the following +query:

+
GET http://couchdb:5984/recipes/_design/recipes/_view/by_ingredient?startkey=%22carrots%22&endkey=%22egg%22
+Accept: application/json
+Content-Type: application/json
+
+

Will operate correctly when listing all the matching entries between +“carrots” and “egg”. If the order of output is reversed with the +descending query argument, the view request will return no entries:

+
GET http://couchdb:5984/recipes/_design/recipes/_view/by_ingredient?descending=true&startkey=%22carrots%22&endkey=%22egg%22
+Accept: application/json
+Content-Type: application/json
+
+

The returned result is empty:

+
{
+   "total_rows" : 26453,
+   "rows" : [],
+   "offset" : 21882
+}
+
+
+

The results will be empty because the entries in the view are reversed +before the key filter is applied, and therefore the endkey of “egg” +will be seen before the startkey of “carrots”, resulting in an empty +list.

+

Instead, you should reverse the values supplied to the startkey and +endkey parameters to match the descending sorting applied to the +keys. Changing the previous example to:

+
GET http://couchdb:5984/recipes/_design/recipes/_view/by_ingredient?descending=true&startkey=%22egg%22&endkey=%22carrots%22
+Accept: application/json
+Content-Type: application/json
+
+
+
+

9.4.9.3. Specifying Start and End Values

+

The startkey and endkey query arguments can be used to specify +the range of values to be displayed when querying the view.

+
+
+

9.4.9.4. Using Limits and Skipping Rows

+

TBC

+
+
+

9.4.9.5. View Reduction and Grouping

+

TBC

+
+
+
+

9.4.10. POST /db/_design/design-doc/_view/view-name

+
    +
  • Method: POST /db/_design/design-doc/_view/view-name
  • +
  • Request: List of keys to be returned from specified view
  • +
  • Response: JSON of the documents returned by the view
  • +
  • Admin Privileges Required: no
  • +
  • Query Arguments:
      +
    • Argument: descending
        +
      • Description: Return the documents in descending order by key
      • +
      • Optional: yes
      • +
      • Type: boolean
      • +
      • Default: false
      • +
      +
    • +
    • Argument: endkey
        +
      • Description: Stop returning records when the specified key is reached
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      +
    • +
    • Argument: endkey_docid
        +
      • Description: Stop returning records when the specified document ID +is reached
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      +
    • +
    • Argument: group
        +
      • Description: Group the results using the reduce function to a group +or single row
      • +
      • Optional: yes
      • +
      • Type: boolean
      • +
      • Default: false
      • +
      +
    • +
    • Argument: group_level
        +
      • Description: Specify the group level to be used
      • +
      • Optional: yes
      • +
      • Type: numeric
      • +
      +
    • +
    • Argument: include_docs
        +
      • Description: Include the full content of the documents in the return
      • +
      • Optional: yes
      • +
      • Type: boolean
      • +
      • Default: false
      • +
      +
    • +
    • Argument: inclusive_end
        +
      • Description: Specifies whether the specified end key should be +included in the result
      • +
      • Optional: yes
      • +
      • Type: boolean
      • +
      • Default: true
      • +
      +
    • +
    • Argument: key
        +
      • Description: Return only documents that match the specified key
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      +
    • +
    • Argument: limit
        +
      • Description: Limit the number of the returned documents to the +specified number
      • +
      • Optional: yes
      • +
      • Type: numeric
      • +
      +
    • +
    • Argument: reduce
        +
      • Description: Use the reduction function
      • +
      • Optional: yes
      • +
      • Type: boolean
      • +
      • Default: true
      • +
      +
    • +
    • Argument: skip
        +
      • Description: Skip this number of records before starting to return +the results
      • +
      • Optional: yes
      • +
      • Type: numeric
      • +
      • Default: 0
      • +
      +
    • +
    • Argument: stale
        +
      • Description: Allow the results from a stale view to be used
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      • Default:
      • +
      • Supported Values:
          +
        • ok: Allow stale views
        • +
        +
      • +
      +
    • +
    • Argument: startkey
        +
      • Description: Return records starting with the specified key
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      +
    • +
    • Argument: startkey_docid
        +
      • Description: Return records starting with the specified document ID
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      +
    • +
    • Argument: update_seq
        +
      • Description: Include the update sequence in the generated results
      • +
      • Optional: yes
      • +
      • Type: boolean
      • +
      • Default: false
      • +
      +
    • +
    +
  • +
+

Executes the specified view-name from the specified design-doc +design document. Unlike the GET method for accessing views, the +POST method supports the specification of explicit keys to be +retrieved from the view results. The remainder of the POST view +functionality is identical to the GET /db/_design/design-doc/_view/view-name API.

+

For example, the request below will return all the recipes where the key +for the view matches either “claret” or “clear apple juice”:

+
POST http://couchdb:5984/recipes/_design/recipes/_view/by_ingredient
+Content-Type: application/json
+
+{
+   "keys" : [
+      "claret",
+      "clear apple juice"
+   ]
+}
+
+

The returned view data contains the standard view information, but only +where the keys match.

+
{
+   "total_rows" : 26484,
+   "rows" : [
+      {
+         "value" : [
+            "Scotch collops"
+         ],
+         "id" : "Scotchcollops",
+         "key" : "claret"
+      },
+      {
+         "value" : [
+            "Stand pie"
+         ],
+         "id" : "Standpie",
+         "key" : "clear apple juice"
+      }
+   ],
+   "offset" : 6324
+}
+
+
+
+

9.4.10.1. Multi-document Fetching

+

By combining the POST method to a given view with the +include_docs=true query argument you can obtain multiple documents +from a database. The result is more efficient than using multiple +GET /db/doc requests.

+

For example, sending the following request for ingredients matching +“claret” and “clear apple juice”:

+
POST http://couchdb:5984/recipes/_design/recipes/_view/by_ingredient?include_docs=true
+Content-Type: application/json
+
+{
+   "keys" : [
+      "claret",
+      "clear apple juice"
+   ]
+}
+
+

Returns the full document for each recipe:

+
{
+   "offset" : 6324,
+   "rows" : [
+      {
+         "doc" : {
+            "_id" : "Scotchcollops",
+            "_rev" : "1-bcbdf724f8544c89697a1cbc4b9f0178",
+            "cooktime" : "8",
+            "ingredients" : [
+               {
+                  "ingredient" : "onion",
+                  "ingredtext" : "onion, peeled and chopped",
+                  "meastext" : "1"
+               },
+            ...
+            ],
+            "keywords" : [
+               "cook method.hob, oven, grill@hob",
+               "diet@wheat-free",
+               "diet@peanut-free",
+               "special collections@classic recipe",
+               "cuisine@british traditional",
+               "diet@corn-free",
+               "diet@citrus-free",
+               "special collections@very easy",
+               "diet@shellfish-free",
+               "main ingredient@meat",
+               "occasion@christmas",
+               "meal type@main",
+               "diet@egg-free",
+               "diet@gluten-free"
+            ],
+            "preptime" : "10",
+            "servings" : "4",
+            "subtitle" : "This recipe comes from an old recipe book of 1683 called 'The Gentlewoman's Kitchen'. This is an excellent way of making a rich and full-flavoured meat dish in a very short time.",
+            "title" : "Scotch collops",
+            "totaltime" : "18"
+         },
+         "id" : "Scotchcollops",
+         "key" : "claret",
+         "value" : [
+            "Scotch collops"
+         ]
+      },
+      {
+         "doc" : {
+            "_id" : "Standpie",
+            "_rev" : "1-bff6edf3ca2474a243023f2dad432a5a",
+            "cooktime" : "92",
+            "ingredients" : [
+...            ],
+            "keywords" : [
+               "diet@dairy-free",
+               "diet@peanut-free",
+               "special collections@classic recipe",
+               "cuisine@british traditional",
+               "diet@corn-free",
+               "diet@citrus-free",
+               "occasion@buffet party",
+               "diet@shellfish-free",
+               "occasion@picnic",
+               "special collections@lunchbox",
+               "main ingredient@meat",
+               "convenience@serve with salad for complete meal",
+               "meal type@main",
+               "cook method.hob, oven, grill@hob / oven",
+               "diet@cow dairy-free"
+            ],
+            "preptime" : "30",
+            "servings" : "6",
+            "subtitle" : "Serve this pie with pickled vegetables and potato salad.",
+            "title" : "Stand pie",
+            "totaltime" : "437"
+         },
+         "id" : "Standpie",
+         "key" : "clear apple juice",
+         "value" : [
+            "Stand pie"
+         ]
+      }
+   ],
+   "total_rows" : 26484
+}
+
+
+
+
+
+

9.4.11. GET /db/_design/design-doc/_show/show-name

+
    +
  • Method: GET /db/_design/design-doc/_show/show-name
  • +
  • Request: None
  • +
  • Response: Returns the result of the show
  • +
  • Admin Privileges Required: no
  • +
  • Query Arguments:
      +
    • Argument: details
        +
      • Description: Indicates whether details should be included
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      +
    • +
    • Argument: format
        +
      • Description: Format of the returned information
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      +
    • +
    +
  • +
+
+
+

9.4.12. POST /db/_design/design-doc/_show/show-name/doc

+
    +
  • Method: POST /db/_design/design-doc/_show/show-name/doc
  • +
  • Request: Custom data
  • +
  • Response: Returns the result of the show
  • +
  • Admin Privileges Required: no
  • +
+
+
+

9.4.13. GET /db/_design/design-doc/_list/list-name/other-design-doc/view-name

+
    +
  • Method: GET /db/_design/design-doc/_list/list-name/other-design-doc/view-name
  • +
  • Request: TBC
  • +
  • Response: TBC
  • +
  • Admin Privileges Required: no
  • +
+
+
+

9.4.14. POST /db/_design/design-doc/_list/list-name/other-design-doc/view-name

+
    +
  • Method: POST /db/_design/design-doc/_list/list-name/other-design-doc/view-name
  • +
  • Request: TBC
  • +
  • Response: TBC
  • +
  • Admin Privileges Required: no
  • +
+
+
+

9.4.15. GET /db/_design/design-doc/_list/list-name/view-name

+
    +
  • Method: GET /db/_design/design-doc/_list/list-name/view-name
  • +
  • Request: TBC
  • +
  • Response: TBC
  • +
  • Admin Privileges Required: no
  • +
+
+
+

9.4.16. POST /db/_design/design-doc/_list/list-name/view-name

+
    +
  • Method: POST /db/_design/design-doc/_list/list-name/view-name
  • +
  • Request: TBC
  • +
  • Response: TBC
  • +
  • Admin Privileges Required: no
  • +
+
+
+

9.4.17. PUT /db/_design/design-doc/_update/updatename/doc

+
    +
  • Method: PUT /db/_design/design-doc/_update/updatename/doc
  • +
  • Request: TBC
  • +
  • Response: TBC
  • +
  • Admin Privileges Required: no
  • +
+
+
+

9.4.18. POST /db/_design/design-doc/_update/updatename

+
    +
  • Method: POST /db/_design/design-doc/_update/updatename
  • +
  • Request: TBC
  • +
  • Response: TBC
  • +
  • Admin Privileges Required: no
  • +
+
+
+

9.4.19. ALL /db/_design/design-doc/_rewrite/rewrite-name/anything

+
    +
  • Method: ALL /db/_design/design-doc/_rewrite/rewrite-name/anything
  • +
  • Request: TBC
  • +
  • Response: TBC
  • +
  • Admin Privileges Required: no
  • +
+
+
+ + +
+
+
+
+
+ + + + + +

Table Of Contents

+ + +

Previous topic

+

9.3. Local (non-replicating) Document Methods

+

Next topic

+

9.5. Miscellaneous Methods

+ + +

Utilities

+ + + + +

More Help

+ + +
+
+
+
+ + + + \ No newline at end of file diff -Nru couchdb-1.2.0/share/doc/build/html/api/documents.html couchdb-1.4.0~rc.1/share/doc/build/html/api/documents.html --- couchdb-1.2.0/share/doc/build/html/api/documents.html 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/api/documents.html 2013-08-23 10:58:46.000000000 -0400 @@ -0,0 +1,1136 @@ + + + + + + + + + + 9.2. Document Methods — Apache CouchDB 1.4 Manual + + + + + + + + + + + + + + + + + +
+
+
+
+ +
+

9.2. Document Methods

+

The CouchDB API Server Document methods detail how to create, read, +update and delete documents within a database.

+

A list of the available methods and URL paths are provided below:

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
MethodPathDescription
POST/dbCreate a new document
GET/db/docReturns the latest revision of the +document
HEAD/db/docReturns bare information in the HTTP +Headers for the document
PUT/db/docInserts a new document, or new version +of an existing document
DELETE/db/docDeletes the document
COPY/db/docCopies the document
GET/db/doc/attachmentGets the attachment of a document
PUT/db/doc/attachmentAdds an attachment of a document
DELETE/db/doc/attachmentDeletes an attachment of a document
+
+

9.2.1. POST /db

+
    +
  • Method: POST /db
  • +
  • Request: JSON of the new document
  • +
  • Response: JSON with the committed document information
  • +
  • Admin Privileges Required: no
  • +
  • Query Arguments:
      +
    • Argument: batch
        +
      • Description: Allow document store request to be batched with others
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      • Supported Values:
      • +
      • ok: Enable
      • +
      +
    • +
    +
  • +
  • Return Codes:
      +
    • 201: +Document has been created successfully
    • +
    • 409: +Conflict - a document with the specified document ID already exists
    • +
    +
  • +
+

Create a new document in the specified database, using the supplied JSON +document structure. If the JSON structure includes the _id field, +then the document will be created with the specified document ID. If the +_id field is not specified, a new unique ID will be generated.

+

For example, you can generate a new document with a generated UUID using +the following request:

+
POST http://couchdb:5984/recipes/
+Content-Type: application/json
+
+{
+   "servings" : 4,
+   "subtitle" : "Delicious with fresh bread",
+   "title" : "Fish Stew"
+}
+
+

The return JSON will specify the automatically generated ID and revision +information:

+
{
+   "id" : "64575eef70ab90a2b8d55fc09e00440d",
+   "ok" : true,
+   "rev" : "1-9c65296036141e575d32ba9c034dd3ee"
+}
+
+
+
+

9.2.1.1. Specifying the Document ID

+

The document ID can be specified by including the _id field in the +JSON of the submitted record. The following request will create the same +document with the ID FishStew:

+
POST http://couchdb:5984/recipes/
+Content-Type: application/json
+
+{
+   "_id" : "FishStew",
+   "servings" : 4,
+   "subtitle" : "Delicious with fresh bread",
+   "title" : "Fish Stew"
+}
+
+

The structure of the submitted document is as shown in the table below:

+

In either case, the returned JSON will specify the document ID, revision +ID, and status message:

+
{
+   "id" : "FishStew",
+   "ok" : true,
+   "rev" : "1-9c65296036141e575d32ba9c034dd3ee"
+}
+
+
+
+
+

9.2.1.2. UUID generation algorithms

+

CouchDB supports a number of different UUID generation algorithms for use +in situations where a user-specified UUID does not make sense. These +can be set simply by PUT http://couchdb:5984/_config/uuids/algorithm.

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + +
AlgorithmDescriptionSample UUID
random128 bits of pure +random awesomeness
    +
  • 43febce5675468a5467fb5467ce9e6c0
  • +
+
sequentialmonotonically +increasing ids with +random increments
    +
  • f755c413badf66b22941313f9f001e28
  • +
  • f755c413badf66b22941313f9f0024ca
  • +
  • f755c413badf66b22941313f9f00332c
  • +
+
utc_randomtime since start of +epoch, as 14 hex +digits, followed by +18 random digits.
    +
  • 04cfa405381205204f75100d0241ccc3
  • +
  • 04cfa4059c48e76e7c054bbe033dd8db
  • +
  • 04cfa405fce10b0df4c08f95e667cd2f
  • +
+
utc_id +& additional +parametertime since start of +epoch, as 14 hex +digits, followed by +utc_id_suffix.
    +
  • 04cfa718b00848_i_am_in_yer_couch
  • +
  • 04cfa71d377aef_i_am_in_yer_couch
  • +
  • 04cfa71e0deabd_i_am_in_yer_couch
  • +
+
+
+
+

9.2.1.3. Batch Mode Writes

+

You can write documents to the database at a higher rate by using the +batch option. This collects document writes together in memory (on a +user-by-user basis) before they are committed to disk. This increases +the risk of the documents not being stored in the event of a failure, +since the documents are not written to disk immediately.

+

To use the batched mode, append the batch=ok query argument to the +URL of the PUT or POST request. The CouchDB server will respond +with a 202 HTTP response code immediately.

+
+
+

9.2.1.4. Including Attachments

+

You can include one or more attachments with a given document by +incorporating the attachment information within the JSON of the +document. This provides a simpler alternative to loading documents with +attachments than making a separate call (see PUT /db/doc/attachment).

+
    +
  • _id (optional): Document ID
  • +
  • _rev (optional): Revision ID (when updating an existing document)
  • +
  • _attachments (optional): Document Attachment
      +
    • filename: Attachment information
        +
      • content_type: MIME Content type string
      • +
      • data: File attachment content, Base64 encoded
      • +
      +
    • +
    +
  • +
+

The filename will be the attachment name. For example, when sending +the JSON structure below:

+
{
+   "_id" : "FishStew",
+   "servings" : 4,
+   "subtitle" : "Delicious with fresh bread",
+   "title" : "Fish Stew",
+   "_attachments" : {
+      "styling.css" : {
+         "content-type" : "text/css",
+         "data" : "cCB7IGZvbnQtc2l6ZTogMTJwdDsgfQo="
+      }
+   }
+}
+
+
+

The attachment styling.css can be accessed using +/recipes/FishStew/styling.css. For more information on attachments, +see GET /db/doc/attachment.

+

The document data embedded into the structure must be encoded using +base64.

+
+
+
+

9.2.2. GET /db/doc

+
    +
  • Method: GET /db/doc
  • +
  • Request: None
  • +
  • Response: Returns the JSON for the document
  • +
  • Admin Privileges Required: no
  • +
  • Query Arguments:
      +
    • Argument: conflicts
        +
      • Description: Returns the conflict tree for the document.
      • +
      • Optional: yes
      • +
      • Type: boolean
      • +
      • Default: false
      • +
      • Supported Values:
          +
        • true: Includes the revisions
        • +
        +
      • +
      +
    • +
    • Argument: rev
        +
      • Description: Specify the revision to return
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      • Supported Values:
          +
        • true: Includes the revisions
        • +
        +
      • +
      +
    • +
    • Argument: revs
        +
      • Description: Return a list of the revisions for the document
      • +
      • Optional: yes
      • +
      • Type: boolean
      • +
      +
    • +
    • Argument: revs_info
        +
      • Description: Return a list of detailed revision information for the +document
      • +
      • Optional: yes
      • +
      • Type: boolean
      • +
      • Supported Values:
          +
        • true: Includes the revisions
        • +
        +
      • +
      +
    • +
    +
  • +
  • Return Codes:
      +
    • 200: +Document retrieved
    • +
    • 400: +The format of the request or revision was invalid
    • +
    • 404: +The specified document or revision cannot be found, or has been deleted
    • +
    • 409: +Conflict - a document with the specified document ID already exists
    • +
    +
  • +
+

Returns the specified doc from the specified db. For example, to +retrieve the document with the id FishStew you would send the +following request:

+
GET http://couchdb:5984/recipes/FishStew
+Content-Type: application/json
+Accept: application/json
+
+

The returned JSON is the JSON of the document, including the document ID +and revision number:

+
{
+   "_id" : "FishStew",
+   "_rev" : "3-a1a9b39ee3cc39181b796a69cb48521c",
+   "servings" : 4,
+   "subtitle" : "Delicious with a green salad",
+   "title" : "Irish Fish Stew"
+}
+
+
+

Unless you request a specific revision, the latest revision of the +document will always be returned.

+
+

9.2.2.1. Attachments

+

If the document includes attachments, then the returned structure will +contain a summary of the attachments associated with the document, but +not the attachment data itself.

+

The JSON for the returned document will include the _attachments +field, with one or more attachment definitions. For example:

+
{
+   "_id" : "FishStew",
+   "servings" : 4,
+   "subtitle" : "Delicious with fresh bread",
+   "title" : "Fish Stew",
+   "_attachments" : {
+      "styling.css" : {
+         "stub" : true,
+         "content-type" : "text/css",
+         "length" : 783426
+      }
+   }
+}
+
+
+

The format of the returned JSON is shown in the table below:

+
    +
  • _id (optional): Document ID
  • +
  • _rev (optional): Revision ID (when updating an existing document)
  • +
  • _attachments (optional): Document Attachment
      +
    • filename: Attachment information
        +
      • content_type: MIME Content type string
      • +
      • length: Length (bytes) of the attachment data
      • +
      • revpos: Revision where this attachment exists
      • +
      • stub: Indicates whether the attachment is a stub
      • +
      +
    • +
    +
  • +
+
+
+

9.2.2.2. Getting a List of Revisions

+

You can obtain a list of the revisions for a given document by adding +the revs=true parameter to the request URL. For example:

+
GET http://couchdb:5984/recipes/FishStew?revs=true
+Accept: application/json
+
+

The returned JSON structure includes the original document, including a +_revisions structure that includes the revision information:

+
{
+   "servings" : 4,
+   "subtitle" : "Delicious with a green salad",
+   "_id" : "FishStew",
+   "title" : "Irish Fish Stew",
+   "_revisions" : {
+      "ids" : [
+         "a1a9b39ee3cc39181b796a69cb48521c",
+         "7c4740b4dcf26683e941d6641c00c39d",
+         "9c65296036141e575d32ba9c034dd3ee"
+      ],
+      "start" : 3
+   },
+   "_rev" : "3-a1a9b39ee3cc39181b796a69cb48521c"
+}
+
+
+
    +
  • _id (optional): Document ID
  • +
  • _rev (optional): Revision ID (when updating an existing document)
  • +
  • _revisions: CouchDB Document Revisions
      +
    • ids [array]: Array of valid revision IDs, in reverse order +(latest first)
    • +
    • start: Prefix number for the latest revision
    • +
    +
  • +
+
+
+

9.2.2.3. Obtaining an Extended Revision History

+

You can get additional information about the revisions for a given +document by supplying the revs_info argument to the query:

+
GET http://couchdb:5984/recipes/FishStew?revs_info=true
+Accept: application/json
+
+

This returns extended revision information, including the availability +and status of each revision:

+
{
+   "servings" : 4,
+   "subtitle" : "Delicious with a green salad",
+   "_id" : "FishStew",
+   "_revs_info" : [
+      {
+         "status" : "available",
+         "rev" : "3-a1a9b39ee3cc39181b796a69cb48521c"
+      },
+      {
+         "status" : "available",
+         "rev" : "2-7c4740b4dcf26683e941d6641c00c39d"
+      },
+      {
+         "status" : "available",
+         "rev" : "1-9c65296036141e575d32ba9c034dd3ee"
+      }
+   ],
+   "title" : "Irish Fish Stew",
+   "_rev" : "3-a1a9b39ee3cc39181b796a69cb48521c"
+}
+
+
+
    +
  • _id (optional): Document ID
  • +
  • _rev (optional): Revision ID (when updating an existing document)
  • +
  • _revs_info [array]: CouchDB Document Extended Revision Info
      +
    • rev: Full revision string
    • +
    • status: Status of the revision
    • +
    +
  • +
+
+
+

9.2.2.4. Obtaining a Specific Revision

+

To get a specific revision, use the rev argument to the request, and +specify the full revision number:

+
GET http://couchdb:5984/recipes/FishStew?rev=2-7c4740b4dcf26683e941d6641c00c39d
+Accept: application/json
+
+

The specified revision of the document will be returned, including a +_rev field specifying the revision that was requested:

+
{
+   "_id" : "FishStew",
+   "_rev" : "2-7c4740b4dcf26683e941d6641c00c39d",
+   "servings" : 4,
+   "subtitle" : "Delicious with a green salad",
+   "title" : "Fish Stew"
+}
+
+
+
+
+
+

9.2.3. HEAD /db/doc

+
    +
  • Method: HEAD /db/doc
  • +
  • Request: None
  • +
  • Response: None
  • +
  • Admin Privileges Required: no
  • +
  • Query Arguments:
      +
    • Argument: rev
        +
      • Description: Specify the revision to return
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      +
    • +
    • Argument: revs
        +
      • Description: Return a list of the revisions for the document
      • +
      • Optional: yes
      • +
      • Type: boolean
      • +
      +
    • +
    • Argument: revs_info
        +
      • Description: Return a list of detailed revision information for the +document
      • +
      • Optional: yes
      • +
      • Type: boolean
      • +
      +
    • +
    +
  • +
  • Return Codes:
      +
    • 404: +The specified document or revision cannot be found, or has been deleted
    • +
    +
  • +
+

Returns the HTTP Headers containing a minimal amount of information +about the specified document. The method supports the same query +arguments as the GET method, but only the header information +(including document size, and the revision as an ETag), is returned. For +example, a simple HEAD request:

+
HEAD http://couchdb:5984/recipes/FishStew
+Content-Type: application/json
+
+

Returns the following HTTP Headers:

+
HTTP/1.1 200 OK
+Server: CouchDB/1.0.1 (Erlang OTP/R13B)
+Etag: "7-a19a1a5ecd946dad70e85233ba039ab2"
+Date: Fri, 05 Nov 2010 14:54:43 GMT
+Content-Type: text/plain;charset=utf-8
+Content-Length: 136
+Cache-Control: must-revalidate
+
+
+

The Etag header shows the current revision for the requested +document, and the Content-Length specifies the length of the data, +if the document were requested in full.

+

If you add any of the query arguments (as supported by the GET method), +then the resulting HTTP Headers will correspond to what would be +returned. Note that the current revision is not returned when the +revs_info argument is used. For example:

+
HTTP/1.1 200 OK
+Server: CouchDB/1.0.1 (Erlang OTP/R13B)
+Date: Fri, 05 Nov 2010 14:57:16 GMT
+Content-Type: text/plain;charset=utf-8
+Content-Length: 609
+Cache-Control: must-revalidate
+
+
+
+
+

9.2.4. PUT /db/doc

+
    +
  • Method: PUT /db/doc
  • +
  • Request: JSON of the new document, or updated version of the existing +document
  • +
  • Response: JSON of the document ID and revision
  • +
  • Admin Privileges Required: no
  • +
  • Query Arguments:
      +
    • Argument: batch
        +
      • Description: Allow document store request to be batched with others
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      • Supported Values:
          +
        • ok: Enable
        • +
        +
      • +
      +
    • +
    +
  • +
  • HTTP Headers
      +
    • Header: If-Match
        +
      • Description: Current revision of the document for validation
      • +
      • Optional: yes
      • +
      +
    • +
    +
  • +
  • Return Codes:
      +
    • 201: +Document has been created successfully
    • +
    • 202: +Document accepted for writing (batch mode)
    • +
    +
  • +
+

The PUT method creates a new named document, or creates a new +revision of the existing document. Unlike the POST method, you +must specify the document ID in the request URL.

+

For example, to create the document FishStew, you would send the +following request:

+
PUT http://couchdb:5984/recipes/FishStew
+Content-Type: application/json
+
+{
+  "servings" : 4,
+  "subtitle" : "Delicious with fresh bread",
+  "title" : "Fish Stew"
+}
+
+

The return type is JSON of the status, document ID, and revision number:

+
{
+   "id" : "FishStew",
+   "ok" : true,
+   "rev" : "1-9c65296036141e575d32ba9c034dd3ee"
+}
+
+
+
+

9.2.4.1. Updating an Existing Document

+

To update an existing document you must specify the current revision +number within the _rev parameter. For example:

+
PUT http://couchdb:5984/recipes/FishStew
+Content-Type: application/json
+
+{
+  "_rev" : "1-9c65296036141e575d32ba9c034dd3ee",
+  "servings" : 4,
+  "subtitle" : "Delicious with fresh salad",
+  "title" : "Fish Stew"
+}
+
+

Alternatively, you can supply the current revision number in the +If-Match HTTP header of the request. For example:

+
PUT http://couchdb:5984/recipes/FishStew
+If-Match: 2-d953b18035b76f2a5b1d1d93f25d3aea
+Content-Type: application/json
+
+{
+   "servings" : 4,
+   "subtitle" : "Delicious with fresh salad",
+   "title" : "Fish Stew"
+}
+
+

The JSON returned will include the updated revision number:

+
{
+   "id" : "FishStew99",
+   "ok" : true,
+   "rev" : "2-d953b18035b76f2a5b1d1d93f25d3aea"
+}
+
+
+

For information on batched writes, which can provide improved +performance, see UUID generation algorithms.

+
+
+
+

9.2.5. DELETE /db/doc

+
    +
  • Method: DELETE /db/doc
  • +
  • Request: None
  • +
  • Response: JSON of the deleted revision
  • +
  • Admin Privileges Required: no
  • +
  • Query Arguments:
      +
    • Argument: rev
        +
      • Description: Current revision of the document for validation
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      +
    • +
    +
  • +
  • HTTP Headers
      +
    • Header: If-Match
        +
      • Description: Current revision of the document for validation
      • +
      • Optional: yes
      • +
      +
    • +
    +
  • +
  • Return Codes:
      +
    • 409: +Revision is missing, invalid or not the latest
    • +
    +
  • +
+

Deletes the specified document from the database. You must supply the +current (latest) revision, either by using the rev parameter to +specify the revision:

+
DELETE http://couchdb:5984/recipes/FishStew?rev=3-a1a9b39ee3cc39181b796a69cb48521c
+Content-Type: application/json
+
+

Alternatively, you can use ETags with the If-Match field:

+
DELETE http://couchdb:5984/recipes/FishStew
+If-Match: 3-a1a9b39ee3cc39181b796a69cb48521c
+Content-Type: application/json
+
+

The returned JSON contains the document ID, revision and status:

+
{
+   "id" : "FishStew",
+   "ok" : true,
+   "rev" : "4-2719fd41187c60762ff584761b714cfb"
+}
+
+
+
+

Note

+

Note that deletion of a record increments the revision number. The +use of a revision for deletion of the record allows replication of +the database to correctly track the deletion in synchronized copies.

+
+
+
+

9.2.6. COPY /db/doc

+
    +
  • Method: COPY /db/doc
  • +
  • Request: None
  • +
  • Response: JSON of the new document and revision
  • +
  • Admin Privileges Required: no
  • +
  • Query Arguments:
      +
    • Argument: rev
        +
      • Description: Revision to copy from
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      +
    • +
    +
  • +
  • HTTP Headers
      +
    • Header: Destination
        +
      • Description: Destination document (and optional revision)
      • +
      • Optional: no
      • +
      +
    • +
    +
  • +
  • Return Codes:
      +
    • 201: +Document has been copied and created successfully
    • +
    • 409: +Revision is missing, invalid or not the latest
    • +
    +
  • +
+

The COPY command (which is non-standard HTTP) copies an existing +document to a new or existing document.

+

The source document is specified on the request line, with the +Destination HTTP Header of the request specifying the target +document.

+
+

9.2.6.1. Copying a Document

+

You can copy the latest version of a document to a new document by +specifying the current document and target document:

+
COPY http://couchdb:5984/recipes/FishStew
+Content-Type: application/json
+Destination: IrishFishStew
+
+

The above request copies the document FishStew to the new document +IrishFishStew. The response is the ID and revision of the new +document.

+
{
+   "id" : "IrishFishStew",
+   "rev" : "1-9c65296036141e575d32ba9c034dd3ee"
+}
+
+
+
+
+

9.2.6.2. Copying from a Specific Revision

+

To copy from a specific version, use the rev argument to the query +string:

+
COPY http://couchdb:5984/recipes/FishStew?rev=5-acfd32d233f07cea4b4f37daaacc0082
+Content-Type: application/json
+Destination: IrishFishStew
+
+

The new document will be created using the information in the specified +revision of the source document.

+
+
+

9.2.6.3. Copying to an Existing Document

+

To copy to an existing document, you must specify the current revision +string for the target document, using the rev parameter to the +Destination HTTP Header string. For example:

+
COPY http://couchdb:5984/recipes/FishStew
+Content-Type: application/json
+Destination: IrishFishStew?rev=1-9c65296036141e575d32ba9c034dd3ee
+
+

The return value will be the new revision of the copied document:

+
{
+   "id" : "IrishFishStew",
+   "rev" : "2-55b6a1b251902a2c249b667dab1c6692"
+}
+
+
+
+
+
+

9.2.7. GET /db/doc/attachment

+
    +
  • Method: GET /db/doc/attachment
  • +
  • Request: None
  • +
  • Response: Returns the attachment data
  • +
  • Admin Privileges Required: no
  • +
+

Returns the file attachment attachment associated with the document +doc. The raw data of the associated attachment is returned (just as +if you were accessing a static file). The returned HTTP Content-type +will be the same as the content type set when the document attachment +was submitted into the database.

+
+
+

9.2.8. PUT /db/doc/attachment

+
    +
  • Method: PUT /db/doc/attachment
  • +
  • Request: Raw document data
  • +
  • Response: JSON document status
  • +
  • Admin Privileges Required: no
  • +
  • Query Arguments:
      +
    • Argument: rev
        +
      • Description: Current document revision
      • +
      • Optional: no
      • +
      • Type: string
      • +
      +
    • +
    +
  • +
  • HTTP Headers
      +
    • Header: Content-Length
        +
      • Description: Length (bytes) of the attachment being uploaded
      • +
      • Optional: no
      • +
      +
    • +
    • Header: Content-Type
        +
      • Description: MIME type for the uploaded attachment
      • +
      • Optional: no
      • +
      +
    • +
    • Header: If-Match
        +
      • Description: Current revision of the document for validation
      • +
      • Optional: yes
      • +
      +
    • +
    +
  • +
  • Return Codes:
      +
    • 201: +Attachment has been accepted
    • +
    +
  • +
+

Upload the supplied content as an attachment to the specified document +(doc). The attachment name provided must be a URL encoded +string. You must also supply either the rev query argument or the +If-Match HTTP header for validation, and the HTTP headers (to set +the attachment content type). The content type is used when the +attachment is requested as the corresponding content-type in the +returned document header.

+

For example, you could upload a simple text document using the following +request:

+
PUT http://couchdb:5984/recipes/FishStew/basic?rev=8-a94cb7e50ded1e06f943be5bfbddf8ca
+Content-Length: 10
+Content-Type: text/plain
+
+Roast it
+
+

Or by using the If-Match HTTP header:

+
PUT http://couchdb:5984/recipes/FishStew/basic
+If-Match: 8-a94cb7e50ded1e06f943be5bfbddf8ca
+Content-Length: 10
+Content-Type: text/plain
+
+Roast it
+
+

The returned JSON contains the new document information:

+
{
+   "id" : "FishStew",
+   "ok" : true,
+   "rev" : "9-247bb19a41bfd9bfdaf5ee6e2e05be74"
+}
+
+
+
+

Note

+

Uploading an attachment updates the corresponding document revision. +Revisions are tracked for the parent document, not individual +attachments.

+
+
+

9.2.8.1. Updating an Existing Attachment

+

Uploading an attachment using an existing attachment name will update +the corresponding stored content of the database. Since you must supply +the revision information to add an attachment to a document, this serves +as validation to update the existing attachment.

+
+
+
+

9.2.9. DELETE /db/doc/attachment

+
    +
  • Method: DELETE /db/doc/attachment
  • +
  • Request: None
  • +
  • Response: JSON status
  • +
  • Admin Privileges Required: no
  • +
  • Query Arguments:
      +
    • Argument: rev
        +
      • Description: Current document revision
      • +
      • Optional: no
      • +
      • Type: string
      • +
      +
    • +
    +
  • +
  • HTTP Headers
      +
    • Header: If-Match
        +
      • Description: Current revision of the document for validation
      • +
      • Optional: yes
      • +
      +
    • +
    +
  • +
  • Return Codes:
      +
    • 200: +Attachment deleted successfully
    • +
    • 409: +Supplied revision is incorrect or missing
    • +
    +
  • +
+

Deletes the attachment attachment to the specified doc. You must +supply the rev argument with the current revision to delete the +attachment.

+

For example to delete the attachment basic from the recipe +FishStew:

+
DELETE http://couchdb:5984/recipes/FishStew/basic?rev=9-247bb19a41bfd9bfdaf5ee6e2e05be74
+Content-Type: application/json
+
+

The returned JSON contains the updated revision information:

+
{
+   "id" : "FishStew",
+   "ok" : true,
+   "rev" : "10-561bf6b1e27615cee83d1f48fa65dd3e"
+}
+
+
+
+
+ + +
+
+
+ +
+
+ + + + \ No newline at end of file diff -Nru couchdb-1.2.0/share/doc/build/html/api/local.html couchdb-1.4.0~rc.1/share/doc/build/html/api/local.html --- couchdb-1.2.0/share/doc/build/html/api/local.html 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/api/local.html 2013-08-23 10:58:47.000000000 -0400 @@ -0,0 +1,363 @@ + + + + + + + + + + 9.3. Local (non-replicating) Document Methods — Apache CouchDB 1.4 Manual + + + + + + + + + + + + + + + + + +
+
+
+
+ +
+

9.3. Local (non-replicating) Document Methods

+

The Local (non-replicating) document interface allows you to create +local documents that are not replicated to other databases. These +documents can be used to hold configuration or other information that is +required specifically on the local CouchDB instance.

+

Local documents have the following limitations:

+
    +
  • Local documents are not replicated to other databases.
  • +
  • The ID of the local document must be known for the document to be +accessed. You cannot obtain a list of local documents from the +database.
  • +
  • Local documents are not output by views, or the _all_docs view.
  • +
+

Local documents can be used when you want to store configuration or +other information for the current (local) instance of a given database.

+

A list of the available methods and URL paths are provided below:

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + +
MethodPathDescription
GET/db/_local/local-docReturns the latest revision of the +non-replicated document
PUT/db/_local/local-docInserts a new version of the +non-replicated document
DELETE/db/_local/local-docDeletes the non-replicated document
COPY/db/_local/local-docCopies the non-replicated document
+
+

9.3.1. GET /db/_local/local-doc

+
    +
  • Method: GET /db/_local/local-doc
  • +
  • Request: None
  • +
  • Response: JSON of the returned document
  • +
  • Admin Privileges Required: no
  • +
  • Query Arguments:
      +
    • Argument: rev
        +
      • Description: Specify the revision to return
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      • Supported Values:
          +
        • true: Includes the revisions
        • +
        +
      • +
      +
    • +
    • Argument: revs
        +
      • Description: Return a list of the revisions for the document
      • +
      • Optional: yes
      • +
      • Type: boolean
      • +
      +
    • +
    • Argument: revs_info
        +
      • Description: Return a list of detailed revision information for +the document
      • +
      • Optional: yes
      • +
      • Type: boolean
      • +
      • Supported Values:
          +
        • true: Includes the revisions
        • +
        +
      • +
      +
    • +
    +
  • +
  • Return Codes:
      +
    • 400: +The format of the request or revision was invalid
    • +
    • 404: +The specified document or revision cannot be found, or has been deleted
    • +
    +
  • +
+

Gets the specified local document. The semantics are identical to +accessing a standard document in the specified database, except that the +document is not replicated. See GET /db/doc.

+
+
+

9.3.2. PUT /db/_local/local-doc

+
    +
  • Method: PUT /db/_local/local-doc
  • +
  • Request: JSON of the document
  • +
  • Response: JSON with the committed document information
  • +
  • Admin Privileges Required: no
  • +
  • Return Codes:
      +
    • 201: +Document has been created successfully
    • +
    +
  • +
+

Stores the specified local document. The semantics are identical to +storing a standard document in the specified database, except that the +document is not replicated. See PUT /db/doc.

+
+
+

9.3.3. DELETE /db/_local/local-doc

+
    +
  • Method: DELETE /db/_local/local-doc
  • +
  • Request: None
  • +
  • Response: JSON with the deleted document information
  • +
  • Admin Privileges Required: no
  • +
  • Query Arguments:
      +
    • Argument: rev
        +
      • Description: Current revision of the document for validation
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      +
    • +
    +
  • +
  • HTTP Headers
      +
    • Header: If-Match
        +
      • Description: Current revision of the document for validation
      • +
      • Optional: yes
      • +
      +
    • +
    +
  • +
  • Return Codes:
      +
    • 409: +Supplied revision is incorrect or missing
    • +
    +
  • +
+

Deletes the specified local document. The semantics are identical to +deleting a standard document in the specified database, except that the +document is not replicated. See DELETE /db/doc.

+
+
+

9.3.4. COPY /db/_local/local-doc

+
    +
  • Method: COPY /db/_local/local-doc
  • +
  • Request: None
  • +
  • Response: JSON of the copied document
  • +
  • Admin Privileges Required: no
  • +
  • Query Arguments:
      +
    • Argument: rev
        +
      • Description: Revision to copy from
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      +
    • +
    +
  • +
  • HTTP Headers
      +
    • Header: Destination
        +
      • Description: Destination document (and optional revision)
      • +
      • Optional: no
      • +
      +
    • +
    +
  • +
+

Copies the specified local document. The semantics are identical to +copying a standard document in the specified database, except that the +document is not replicated. See COPY /db/doc.

+
+
+ + +
+
+
+
+ +
+
+
+ + + + \ No newline at end of file diff -Nru couchdb-1.2.0/share/doc/build/html/api/misc.html couchdb-1.4.0~rc.1/share/doc/build/html/api/misc.html --- couchdb-1.2.0/share/doc/build/html/api/misc.html 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/api/misc.html 2013-08-23 10:58:49.000000000 -0400 @@ -0,0 +1,1222 @@ + + + + + + + + + + 9.5. Miscellaneous Methods — Apache CouchDB 1.4 Manual + + + + + + + + + + + + + + + + + +
+
+
+
+ +
+

9.5. Miscellaneous Methods

+

The CouchDB Miscellaneous interface provides the basic interface to a +CouchDB server for obtaining CouchDB information and getting and setting +configuration information.

+

A list of the available methods and URL paths are provided below:

+ +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
MethodPathDescription
GET/Get the welcome message and version +information
GET/_active_tasksObtain a list of the tasks running in the +server
GET/_all_dbsGet a list of all the DBs
GET/_db_updatesA feed of database events
GET/_logReturn the server log file
POST/_replicateSet or cancel replication
POST/_restartRestart the server
GET/_statsReturn server statistics
GET/_utilsCouchDB administration interface (Futon)
GET/_uuidsGet generated UUIDs from the server
GET/favicon.icoGet the site icon
+
+

9.5.1. GET /

+
    +
  • Method: GET /
  • +
  • Request: None
  • +
  • Response: Welcome message and version
  • +
  • Admin Privileges Required: no
  • +
  • Return Codes:
      +
    • 200: +Request completed successfully.
    • +
    +
  • +
+

Accessing the root of a CouchDB instance returns meta information about +the instance. The response is a JSON structure containing information +about the server, including a welcome message and the version of the +server.

+
{
+   "couchdb" : "Welcome",
+   "version" : "1.0.1"
+}
+
+
+
+
+

9.5.2. GET /_active_tasks

+
    +
  • Method: GET /_active_tasks
  • +
  • Request: None
  • +
  • Response: List of running tasks, including the task type, name, status +and process ID
  • +
  • Admin Privileges Required: yes
  • +
  • Return Codes:
      +
    • 200: +Request completed successfully.
    • +
    +
  • +
+

You can obtain a list of active tasks by using the /_active_tasks +URL. The result is a JSON array of the currently running tasks, with +each task being described with a single object. For example:

+
[
+   {
+    "pid" : "<0.11599.0>",
+    "status" : "Copied 0 of 18369 changes (0%)",
+    "task" : "recipes",
+    "type" : "Database Compaction"
+    }
+]
+
+
+

The returned structure includes the following fields for each task:

+
    +
  • tasks [array]: Active Task
      +
    • pid: Process ID
    • +
    • status: Task status message
    • +
    • task: Task name
    • +
    • type: Operation Type
    • +
    +
  • +
+

For operation type, valid values include:

+
    +
  • Database Compaction
  • +
  • Replication
  • +
  • View Group Compaction
  • +
  • View Group Indexer
  • +
+
+
+

9.5.3. GET /_all_dbs

+
    +
  • Method: GET /_all_dbs
  • +
  • Request: None
  • +
  • Response: JSON list of DBs
  • +
  • Admin Privileges Required: no
  • +
  • Return Codes:
      +
    • 200: +Request completed successfully.
    • +
    +
  • +
+

Returns a list of all the databases in the CouchDB instance. For +example:

+
GET http://couchdb:5984/_all_dbs
+Accept: application/json
+
+

The return is a JSON array:

+
[
+   "_users",
+   "contacts",
+   "docs",
+   "invoices",
+   "locations"
+]
+
+
+
+
+

9.5.4. GET /_db_updates

+
    +
  • Method: GET /_db_updates
  • +
  • Request: None
  • +
  • Admin Privileges Required: yes
  • +
  • Query Arguments:
      +
    • Argument: feed
        +
      • Description: Format of the response feed
      • +
      • Optional: yes
      • +
      • Type: string
      • +
      • Default: longpoll
      • +
      • Supported Values:
          +
        • longpoll: Closes the connection after the first event.
        • +
        • continuous: Send a line of JSON per event. Keeps the socket open until timeout.
        • +
        • eventsource: Like continuous, but sends the events in EventSource format. See http://dev.w3.org/html5/eventsource/ for details.
        • +
        +
      • +
      +
    • +
    • Argument: timeout
        +
      • Description: Number of seconds until CouchDB closes the connection.
      • +
      • Optional: yes
      • +
      • Type: numeric
      • +
      • Default: 60
      • +
      +
    • +
    • Argument: heartbeat
        +
      • Description: Whether CouchDB will send a newline character (\n) on timeout.
      • +
      • Optional: yes
      • +
      • Type: boolean
      • +
      • Default: true
      • +
      +
    • +
    +
  • +
  • Return Codes:
      +
    • 200: +Request completed successfully.
    • +
    +
  • +
+

Returns a list of all database events in the CouchDB instance.

+

A database event is one of created, updated, deleted.

+

For example:

+
GET http://couchdb:5984/_db_updates?feed=continuous
+Accept: application/json
+
+
{"dbname":"my-database", "type":"created"}
+{"dbname":"my-database", "type":"updated"}
+{"dbname":"another-database", "type":"created"}
+{"dbname":"my-database", "type":"deleted"}
+{"dbname":"another-database", "type":"updated"}
+
+
+
+
+

9.5.5. GET /_log

+
    +
  • Method: GET /_log
  • +
  • Request: None
  • +
  • Response: Log content
  • +
  • Admin Privileges Required: yes
  • +
  • Query Arguments:
      +
    • Argument: bytes
        +
      • Description: Bytes to be returned
      • +
      • Optional: yes
      • +
      • Type: numeric
      • +
      • Default: 1000
      • +
      +
    • +
    • Argument: offset
        +
      • Description: Offset in bytes where the log tail should be started
      • +
      • Optional: yes
      • +
      • Type: numeric
      • +
      • Default: 0
      • +
      +
    • +
    +
  • +
  • Return Codes:
      +
    • 200: +Request completed successfully.
    • +
    +
  • +
+

Gets the CouchDB log, equivalent to accessing the local log file of the +corresponding CouchDB instance.

+

When you request the log, the response is returned as plain (UTF-8) +text, with an HTTP Content-type header as text/plain.

+

For example, the request:

+
GET http://couchdb:5984/_log
+Accept: */*
+
+

The raw text is returned:

+
[Wed, 27 Oct 2010 10:49:42 GMT] [info] [<0.23338.2>] 192.168.0.2 - - 'PUT' /authdb 401
+[Wed, 27 Oct 2010 11:02:19 GMT] [info] [<0.23428.2>] 192.168.0.116 - - 'GET' /recipes/FishStew 200
+[Wed, 27 Oct 2010 11:02:19 GMT] [info] [<0.23428.2>] 192.168.0.116 - - 'GET' /_session 200
+[Wed, 27 Oct 2010 11:02:19 GMT] [info] [<0.24199.2>] 192.168.0.116 - - 'GET' / 200
+[Wed, 27 Oct 2010 13:03:38 GMT] [info] [<0.24207.2>] 192.168.0.116 - - 'GET' /_log?offset=5 200
+
+
+

If you want to pick out specific parts of the log information you can +use the bytes argument, which specifies the number of bytes to be +returned, and offset, which specifies where the reading of the log +should start, counted back from the end. For example, if you use the +following request:

+
GET /_log?bytes=500&offset=2000
+
+

Reading of the log will start at 2000 bytes from the end of the log, and +500 bytes will be shown.

+
+
+

9.5.6. POST /_replicate

+
    +
  • Method: POST /_replicate
  • +
  • Request: Replication specification
  • +
  • Response: TBD
  • +
  • Admin Privileges Required: yes
  • +
  • Query Arguments:
      +
    • Argument: bytes
        +
      • Description: Bytes to be returned
      • +
      • Optional: yes
      • +
      • Type: numeric
      • +
      • Default: 1000
      • +
      +
    • +
    • Argument: offset
        +
      • Description: Offset in bytes where the log tail should be started
      • +
      • Optional: yes
      • +
      • Type: numeric
      • +
      • Default: 0
      • +
      +
    • +
    +
  • +
  • Return Codes:
      +
    • 200: +Replication request successfully completed
    • +
    • 202: +Continuous replication request has been accepted
    • +
    • 404: +Either the source or target DB is not found
    • +
    • 500: +JSON specification was invalid
    • +
    +
  • +
+

Request, configure, or stop, a replication operation.

+

The specification of the replication request is controlled through the +JSON content of the request. The JSON should be an object with the +fields defining the source, target and other options. The fields of the +JSON request are shown in the table below:

+
    +
  • cancel (optional): Cancels the replication
  • +
  • continuous (optional): Configure the replication to be continuous
  • +
  • create_target (optional): Creates the target database
  • +
  • doc_ids (optional): Array of document IDs to be synchronized
  • +
  • proxy (optional): Address of a proxy server through which replication +should occur
  • +
  • source: Source database name or URL
  • +
  • target: Target database name or URL
  • +
+
+

9.5.6.1. Replication Operation

+

The aim of the replication is that at the end of the process, all active +documents on the source database are also in the destination database +and all documents that were deleted in the source databases are also +deleted (if they exist) on the destination database.

+

Replication can be described as either push or pull replication:

+
    +
  • Pull replication is where the source is the remote CouchDB +instance, and the destination is the local database.

    +

    Pull replication is the most useful solution to use if your source +database has a permanent IP address, and your destination (local) +database may have a dynamically assigned IP address (for example, +through DHCP). This is particularly important if you are replicating +to a mobile or other device from a central server.

    +
  • +
  • Push replication is where the source is a local database, and +destination is a remote database.

    +
  • +
+
+
+

9.5.6.2. Specifying the Source and Target Database

+

You must use the URL specification of the CouchDB database if you want +to perform replication in either of the following two situations:

+
    +
  • Replication with a remote database (i.e. another instance of CouchDB +on the same host, or a different host)
  • +
  • Replication with a database that requires authentication
  • +
+

For example, to request replication between a database local to the +CouchDB instance to which you send the request, and a remote database +you might use the following request:

+
POST http://couchdb:5984/_replicate
+Content-Type: application/json
+Accept: application/json
+
+{
+   "source" : "recipes",
+   "target" : "http://coucdb-remote:5984/recipes",
+}
+
+

In all cases, the requested databases in the source and target +specification must exist. If they do not, an error will be returned +within the JSON object:

+
{
+   "error" : "db_not_found"
+   "reason" : "could not open http://couchdb-remote:5984/ol1ka/",
+}
+
+
+

You can create the target database (providing your user credentials +allow it) by adding the create_target field to the request object:

+
POST http://couchdb:5984/_replicate
+Content-Type: application/json
+Accept: application/json
+
+{
+   "create_target" : true
+   "source" : "recipes",
+   "target" : "http://couchdb-remote:5984/recipes",
+}
+
+

The create_target field is not destructive. If the database already +exists, the replication proceeds as normal.

+
+
+

9.5.6.3. Single Replication

+

You can request replication of a database so that the two databases can +be synchronized. By default, the replication process occurs one time and +synchronizes the two databases together. For example, you can request a +single synchronization between two databases by supplying the source +and target fields within the request JSON content.

+
POST http://couchdb:5984/_replicate
+Content-Type: application/json
+Accept: application/json
+
+{
+   "source" : "recipes",
+   "target" : "recipes-snapshot",
+}
+
+

In the above example, the databases recipes and recipes-snapshot +will be synchronized. These databases are local to the CouchDB instance +where the request was made. The response will be a JSON structure +containing the success (or failure) of the synchronization process, and +statistics about the process:

+
{
+   "ok" : true,
+   "history" : [
+      {
+         "docs_read" : 1000,
+         "session_id" : "52c2370f5027043d286daca4de247db0",
+         "recorded_seq" : 1000,
+         "end_last_seq" : 1000,
+         "doc_write_failures" : 0,
+         "start_time" : "Thu, 28 Oct 2010 10:24:13 GMT",
+         "start_last_seq" : 0,
+         "end_time" : "Thu, 28 Oct 2010 10:24:14 GMT",
+         "missing_checked" : 0,
+         "docs_written" : 1000,
+         "missing_found" : 1000
+      }
+   ],
+   "session_id" : "52c2370f5027043d286daca4de247db0",
+   "source_last_seq" : 1000
+}
+
+
+

The structure defines the replication status, as described in the table +below:

+
    +
  • history [array]: Replication History
      +
    • doc_write_failures: Number of document write failures
    • +
    • docs_read: Number of documents read
    • +
    • docs_written: Number of documents written to target
    • +
    • end_last_seq: Last sequence number in changes stream
    • +
    • end_time: Date/Time replication operation completed
    • +
    • missing_checked: Number of missing documents checked
    • +
    • missing_found: Number of missing documents found
    • +
    • recorded_seq: Last recorded sequence number
    • +
    • session_id: Session ID for this replication operation
    • +
    • start_last_seq: First sequence number in changes stream
    • +
    • start_time: Date/Time replication operation started
    • +
    +
  • +
  • ok: Replication status
  • +
  • session_id: Unique session ID
  • +
  • source_last_seq: Last sequence number read from source database
  • +
+
+
+

9.5.6.4. Continuous Replication

+

Synchronization of a database with the previously noted methods happens +only once, at the time the replicate request is made. To have the target +database permanently replicated from the source, you must set the +continuous field of the JSON object within the request to true.

+

With continuous replication changes in the source database are +replicated to the target database in perpetuity until you specifically +request that replication ceases.

+
POST http://couchdb:5984/_replicate
+Content-Type: application/json
+Accept: application/json
+
+{
+   "continuous" : true
+   "source" : "recipes",
+   "target" : "http://couchdb-remote:5984/recipes",
+}
+
+

Changes will be replicated between the two databases as long as a +network connection is available between the two instances.

+
+

Note

+

To keep two databases synchronized with each other, you need to set +replication in both directions; that is, you must replicate from +databasea to databaseb, and separately from databaseb to +databasea.

+
+
+
+

9.5.6.5. Canceling Continuous Replication

+

You can cancel continuous replication by adding the cancel field to +the JSON request object and setting the value to true. Note that the +structure of the request must be identical to the original for the +cancellation request to be honoured. For example, if you requested +continuous replication, the cancellation request must also contain the +continuous field.

+

For example, the replication request:

+
POST http://couchdb:5984/_replicate
+Content-Type: application/json
+Accept: application/json
+
+{
+   "source" : "recipes",
+   "target" : "http://couchdb-remote:5984/recipes",
+   "create_target" : true,
+   "continuous" : true
+}
+
+

Must be canceled using the request:

+
POST http://couchdb:5984/_replicate
+Content-Type: application/json
+Accept: application/json
+
+{
+    "cancel" : true,
+    "continuous" : true
+    "create_target" : true,
+    "source" : "recipes",
+    "target" : "http://couchdb-remote:5984/recipes",
+}
+
+

Requesting cancellation of a replication that does not exist results in +a 404 error.

+
+
+
+

9.5.7. POST /_restart

+
    +
  • Method: POST /_restart
  • +
  • Request: None
  • +
  • Response: JSON status message
  • +
  • Admin Privileges Required: yes
  • +
  • HTTP Headers:
      +
    • Header: Content-Type
        +
      • Description: Request content type
      • +
      • Optional: no
      • +
      • Value: application/json
      • +
      +
    • +
    +
  • +
  • Return Codes:
      +
    • 200: +Replication request successfully completed
    • +
    +
  • +
+

Restarts the CouchDB instance. You must be authenticated as a user with +administration privileges for this to work.

+

For example:

+
POST http://admin:password@couchdb:5984/_restart
+
+

The return value (if the server has not already restarted) is a JSON +status object indicating that the request has been received:

+
{
+   "ok" : true,
+}
+
+
+

If the server has already restarted, the header may be returned, but no +actual data is contained in the response.

+
+
+

9.5.8. GET /_stats

+
    +
  • Method: GET /_stats
  • +
  • Request: None
  • +
  • Response: Server statistics
  • +
  • Admin Privileges Required: no
  • +
  • Return Codes:
      +
    • 200: +Request completed successfully.
    • +
    +
  • +
+

The _stats method returns a JSON object containing the statistics +for the running server. The object is structured with top-level sections +collating the statistics for a range of entries, with each individual +statistic being easily identified, and the content of each statistic is +self-describing. For example, the request time statistics, within the +couchdb section are structured as follows:

+
{
+   "couchdb" : {
+...
+      "request_time" : {
+         "stddev" : "27.509",
+         "min" : "0.333333333333333",
+         "max" : "152",
+         "current" : "400.976",
+         "mean" : "10.837",
+         "sum" : "400.976",
+         "description" : "length of a request inside CouchDB without MochiWeb"
+      },
+...
+    }
+}
+
+
+

The fields provide the current, minimum and maximum, and a collection of +statistical means and quantities. The quantity in each case is not +defined, but the descriptions below provide

+

The statistics are divided into the following top-level sections:

+
    +
  • couchdb: Describes statistics specific to the internals of CouchDB.

    + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    Statistic ID

    +

    Description

    +

    Unit

    +

    auth_cache_hits

    +

    Number of authentication cache hits

    +

    number

    +

    auth_cache_misses

    +

    Number of authentication cache misses

    +

    number

    +

    database_reads

    +

    Number of times a document was read from a database

    +

    number

    +

    database_writes

    +

    Number of times a database was changed

    +

    number

    +

    open_databases

    +

    Number of open databases

    +

    number

    +

    open_os_files

    +

    Number of file descriptors CouchDB has open

    +

    number

    +

    request_time

    +

    Length of a request inside CouchDB without MochiWeb

    +

    milliseconds

    +
    +
  • +
  • httpd_request_methods

    + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    Statistic ID

    +

    Description

    +

    Unit

    +

    COPY

    +

    Number of HTTP COPY requests

    +

    number

    +

    DELETE

    +

    Number of HTTP DELETE requests

    +

    number

    +

    GET

    +

    Number of HTTP GET requests

    +

    number

    +

    HEAD

    +

    Number of HTTP HEAD requests

    +

    number

    +

    POST

    +

    Number of HTTP POST requests

    +

    number

    +

    PUT

    +

    Number of HTTP PUT requests

    +

    number

    +
    +
  • +
  • httpd_status_codes

    + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    Statistic ID

    +

    Description

    +

    Unit

    +

    200

    +

    Number of HTTP 200 OK responses

    +

    number

    +

    201

    +

    Number of HTTP 201 Created responses

    +

    number

    +

    202

    +

    Number of HTTP 202 Accepted responses

    +

    number

    +

    301

    +

    Number of HTTP 301 Moved Permanently responses

    +

    number

    +

    304

    +

    Number of HTTP 304 Not Modified responses

    +

    number

    +

    400

    +

    Number of HTTP 400 Bad Request responses

    +

    number

    +

    401

    +

    Number of HTTP 401 Unauthorized responses

    +

    number

    +

    403

    +

    Number of HTTP 403 Forbidden responses

    +

    number

    +

    404

    +

    Number of HTTP 404 Not Found responses

    +

    number

    +

    405

    +

    Number of HTTP 405 Method Not Allowed responses

    +

    number

    +

    409

    +

    Number of HTTP 409 Conflict responses

    +

    number

    +

    412

    +

    Number of HTTP 412 Precondition Failed responses

    +

    number

    +

    500

    +

    Number of HTTP 500 Internal Server Error responses

    +

    number

    +
    +
  • +
  • httpd

    + +++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

    Statistic ID

    +

    Description

    +

    Unit

    +

    bulk_requests

    +

    Number of bulk requests

    +

    number

    +

    clients_requesting_changes

    +

    Number of clients for continuous _changes

    +

    number

    +

    requests

    +

    Number of HTTP requests

    +

    number

    +

    temporary_view_reads

    +

    Number of temporary view reads

    +

    number

    +

    view_reads

    +

    Number of view reads

    +

    number

    +
    +
  • +
+

You can also access individual statistics by quoting the statistics +sections and statistic ID as part of the URL path. For example, to get +the request_time statistics, you can use:

+
GET /_stats/couchdb/request_time
+
+

This returns an entire statistics object, as with the full request, but +containing only the request individual statistic. Hence, the returned +structure is as follows:

+
{
+   "couchdb" : {
+      "request_time" : {
+         "stddev" : 7454.305,
+         "min" : 1,
+         "max" : 34185,
+         "current" : 34697.803,
+         "mean" : 1652.276,
+         "sum" : 34697.803,
+         "description" : "length of a request inside CouchDB without MochiWeb"
+      }
+   }
+}
+
+
+
+
+

9.5.9. GET /_utils

+
    +
  • Method: GET /_utils
  • +
  • Request: None
  • +
  • Response: Administration interface
  • +
  • Admin Privileges Required: no
  • +
+

Accesses the built-in Futon administration interface for CouchDB.

+
+
+

9.5.10. GET /_uuids

+
    +
  • Method: GET /_uuids
  • +
  • Request: None
  • +
  • Response: List of UUIDs
  • +
  • Admin Privileges Required: no
  • +
  • Query Arguments:
      +
    • Argument: count
        +
      • Description: Number of UUIDs to return
      • +
      • Optional: yes
      • +
      • Type: numeric
      • +
      +
    • +
    +
  • +
  • Return Codes:
      +
    • 200: +Request completed successfully.
    • +
    +
  • +
+

Requests one or more Universally Unique Identifiers (UUIDs) from the +CouchDB instance. The response is a JSON object providing a list of +UUIDs. For example:

+
{
+   "uuids" : [
+      "7e4b5a14b22ec1cf8e58b9cdd0000da3"
+   ]
+}
+
+
+

You can use the count argument to specify the number of UUIDs to be +returned. For example:

+
GET http://couchdb:5984/_uuids?count=5
+
+

Returns:

+
{
+   "uuids" : [
+      "c9df0cdf4442f993fc5570225b405a80",
+      "c9df0cdf4442f993fc5570225b405bd2",
+      "c9df0cdf4442f993fc5570225b405e42",
+      "c9df0cdf4442f993fc5570225b4061a0",
+      "c9df0cdf4442f993fc5570225b406a20"
+   ]
+}
+
+
+

The UUID type is determined by the UUID type setting in the CouchDB +configuration. See PUT /_config/section/key.

+

For example, changing the UUID type to random:

+
PUT http://couchdb:5984/_config/uuids/algorithm
+Content-Type: application/json
+Accept: */*
+
+"random"
+
+

When obtaining a list of UUIDs:

+
{
+   "uuids" : [
+      "031aad7b469956cf2826fcb2a9260492",
+      "6ec875e15e6b385120938df18ee8e496",
+      "cff9e881516483911aa2f0e98949092d",
+      "b89d37509d39dd712546f9510d4a9271",
+      "2e0dbf7f6c4ad716f21938a016e4e59f"
+   ]
+}
+
+
+
+
+

9.5.11. GET /favicon.ico

+
    +
  • Method: GET /favicon.ico
  • +
  • Request: None
  • +
  • Response: Binary content for the favicon.ico site icon
  • +
  • Admin Privileges Required: no
  • +
  • Return Codes:
      +
    • 200: +Request completed successfully.
    • +
    • 404: +The requested content could not be found. The returned content will include +further information, as a JSON object, if available.
    • +
    +
  • +
+

Returns the site icon. The return Content-Type header is +image/x-icon, and the content stream is the image data.

+
+
+ + +
+
+
+ +
+
+ + + + \ No newline at end of file diff -Nru couchdb-1.2.0/share/doc/build/html/api/reference.html couchdb-1.4.0~rc.1/share/doc/build/html/api/reference.html --- couchdb-1.2.0/share/doc/build/html/api/reference.html 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/api/reference.html 2013-08-23 10:58:50.000000000 -0400 @@ -0,0 +1,273 @@ + + + + + + + + + + 9. API Reference — Apache CouchDB 1.4 Manual + + + + + + + + + + + + + + + + +
+
+
+
+ +
+

9. API Reference

+

The components of the API URL path help determine the part of the +CouchDB server that is being accessed. The result is the structure of +the URL request both identifies and effectively describes the area of +the database you are accessing.

+

As with all URLs, the individual components are separated by a forward +slash.

+

As a general rule, URL components and JSON fields starting with the +_ (underscore) character represent a special component or entity +within the server or returned object. For example, the URL fragment +/_all_dbs gets a list of all of the databases in a CouchDB instance.

+

This reference is structured according to the URL structure, as below.

+
+ +
+
+ + +
+
+
+
+
+ + + + + +

Previous topic

+

8. Changes Feed

+

Next topic

+

9.1. Database Methods

+ + +

Utilities

+ + + + +

More Help

+ + +
+
+
+
+ + + + \ No newline at end of file diff -Nru couchdb-1.2.0/share/doc/build/html/api-basics.html couchdb-1.4.0~rc.1/share/doc/build/html/api-basics.html --- couchdb-1.2.0/share/doc/build/html/api-basics.html 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/api-basics.html 2013-08-23 10:58:36.000000000 -0400 @@ -0,0 +1,556 @@ + + + + + + + + + + 2. API Basics — Apache CouchDB 1.4 Manual + + + + + + + + + + + + + + + + +
+
+
+
+ +
+

2. API Basics

+

The CouchDB API is the primary method of interfacing to a CouchDB +instance. Requests are made using HTTP and requests are used to request +information from the database, store new data, and perform views and +formatting of the information stored within the documents.

+

Requests to the API can be categorised by the different areas of the +CouchDB system that you are accessing, and the HTTP method used to send +the request. Different methods imply different operations, for example +retrieval of information from the database is typically handled by the +GET operation, while updates are handled by either a POST or +PUT request. There are some differences between the information that +must be supplied for the different methods. For a guide to the basic +HTTP methods and request structure, see Request Format and Responses.

+

For nearly all operations, the submitted data, and the returned data +structure, is defined within a JavaScript Object Notation (JSON) object. +Basic information on the content and data types for JSON are provided in +JSON Basics.

+

Errors when accessing the CouchDB API are reported using standard HTTP +Status Codes. A guide to the generic codes returned by CouchDB are +provided in HTTP Status Codes.

+

When accessing specific areas of the CouchDB API, specific information +and examples on the HTTP methods and request, JSON structures, and error +codes are provided. For a guide to the different areas of the API, see +API Reference.

+
+

2.1. Request Format and Responses

+

CouchDB supports the following HTTP request methods:

+
    +
  • GET

    +

    Request the specified item. As with normal HTTP requests, the format +of the URL defines what is returned. With CouchDB this can include +static items, database documents, and configuration and statistical +information. In most cases the information is returned in the form of +a JSON document.

    +
  • +
  • HEAD

    +

    The HEAD method is used to get the HTTP header of a GET +request without the body of the response.

    +
  • +
  • POST

    +

    Upload data. Within CouchDB POST is used to set values, including +uploading documents, setting document values, and starting certain +administration commands.

    +
  • +
  • PUT

    +

    Used to put a specified resource. In CouchDB PUT is used to +create new objects, including databases, documents, views and design +documents.

    +
  • +
  • DELETE

    +

    Deletes the specified resource, including documents, views, and +design documents.

    +
  • +
  • COPY

    +

    A special method that can be used to copy documents and objects.

    +
  • +
+

If you use an unsupported HTTP request type with a URL that does not +support the specified type, a 405 error will be returned, listing the +supported HTTP methods. For example:

+
{
+    "error":"method_not_allowed",
+    "reason":"Only GET,HEAD allowed"
+}
+
+
+

The CouchDB design document API and the functions when returning HTML +(for example as part of a show or list) enables you to include custom +HTTP headers through the headers block of the return object.

+
+
+

2.2. HTTP Headers

+

Because CouchDB uses HTTP for all communication, you need to ensure that +the correct HTTP headers are supplied (and processed on retrieval) so +that you get the right format and encoding. Different environments and +clients will be more or less strict on the effect of these HTTP headers +(especially when not present). Where possible you should be as specific +as possible.

+
+

2.2.1. Request Headers

+
    +
  • Content-type

    +

    Specifies the content type of the information being supplied within +the request. The specification uses MIME type specifications. For the +majority of requests this will be JSON (application/json). For +some settings the MIME type will be plain text. When uploading +attachments it should be the corresponding MIME type for the +attachment or binary (application/octet-stream).

    +

    The use of the Content-type on a request is highly recommended.

    +
  • +
  • Accept

    +

    Specifies the list of accepted data types to be returned by the +server (i.e. that are accepted/understandable by the client). The +format should be a list of one or more MIME types, separated by +colons.

    +

    For the majority of requests the definition should be for JSON data +(application/json). For attachments you can either specify the +MIME type explicitly, or use */* to specify that all file types +are supported. If the Accept header is not supplied, then the +*/* MIME type is assumed (i.e. client accepts all formats).

    +

    The use of Accept in queries for CouchDB is not required, but is +highly recommended as it helps to ensure that the data returned can +be processed by the client.

    +

    If you specify a data type using the Accept header, CouchDB will +honor the specified type in the Content-type header field +returned. For example, if you explicitly request application/json +in the Accept of a request, the returned HTTP headers will use +the value in the returned Content-type field.

    +

    For example, when sending a request without an explicit Accept +header, or when specifying */*:

    +
    GET /recipes HTTP/1.1
    +Host: couchdb:5984
    +Accept: */*
    +
    +
    +

    The returned headers are:

    +
    Server: CouchDB/1.0.1 (Erlang OTP/R13B)
    +Date: Thu, 13 Jan 2011 13:39:34 GMT
    +Content-Type: text/plain;charset=utf-8
    +Content-Length: 227
    +Cache-Control: must-revalidate
    +
    +

    Note that the returned content type is text/plain even though the +information returned by the request is in JSON format.

    +

    Explicitly specifying the Accept header:

    +
    GET /recipes HTTP/1.1
    +Host: couchdb:5984
    +Accept: application/json
    +
    +
    +

    The headers returned include the application/json content type:

    +
    Server: CouchDB/|version| (Erlang OTP/R13B)
    +Date: Thu, 13 Jan 2011 13:40:11 GMT
    +Content-Type: application/json
    +Content-Length: 227
    +Cache-Control: must-revalidate
    +
    +
  • +
+
+
+

2.2.2. Response Headers

+

Response headers are returned by the server when sending back content +and include a number of different header fields, many of which are +standard HTTP response header and have no significance to CouchDB +operation. The list of response headers important to CouchDB are listed +below.

+
    +
  • Content-type

    +

    Specifies the MIME type of the returned data. For most request, the +returned MIME type is text/plain. All text is encoded in Unicode +(UTF-8), and this is explicitly stated in the returned +Content-type, as text/plain;charset=utf-8.

    +
  • +
  • Cache-control

    +

    The cache control HTTP response header provides a suggestion for +client caching mechanisms on how to treat the returned information. +CouchDB typically returns the must-revalidate, which indicates +that the information should be revalidated if possible. This is used +to ensure that the dynamic nature of the content is correctly +updated.

    +
  • +
  • Content-length

    +

    The length (in bytes) of the returned content.

    +
  • +
  • Etag

    +

    The Etag HTTP header field is used to show the revision for a +document, or a view.

    +

    ETags have been assigned to a map/reduce group (the collection of +views in a single design document). Any change to any of the indexes +for those views would generate a new ETag for all view URL’s in a +single design doc, even if that specific view’s results had not +changed.

    +

    Each _view URL has its own ETag which only gets updated when +changes are made to the database that affect that index. If the +index for that specific view does not change, that view keeps the +original ETag head (therefore sending back 304 Not Modified more +often).

    +
  • +
+
+
+
+

2.3. JSON Basics

+

The majority of requests and responses to CouchDB use the JavaScript +Object Notation (JSON) for formatting the content and structure of the +data and responses.

+

JSON is used because it is the simplest and easiest to use solution for +working with data within a web browser, as JSON structures can be +evaluated and used as JavaScript objects within the web browser +environment. JSON also integrates with the server-side JavaScript used +within CouchDB.

+

JSON supports the same basic types as supported by JavaScript, these +are:

+
    +
  • Number (either integer or floating-point).

    +
  • +
  • String; this should be enclosed by double-quotes and supports Unicode +characters and backslash escaping. For example:

    +
    "A String"
    +
    +
    +
  • +
  • Boolean - a true or false value. You can use these strings +directly. For example:

    +
    { "value": true}
    +
    +
    +
  • +
  • Array - a list of values enclosed in square brackets. For example:

    +
    ["one", "two", "three"]
    +
    +
    +
  • +
  • Object - a set of key/value pairs (i.e. an associative array, or +hash). The key must be a string, but the value can be any of the +supported JSON values. For example:

    +
    {
    +   "servings" : 4,
    +   "subtitle" : "Easy to make in advance, and then cook when ready",
    +   "cooktime" : 60,
    +   "title" : "Chicken Coriander"
    +}
    +
    +
    +

    In CouchDB, the JSON object is used to represent a variety of +structures, including the main CouchDB document.

    +
  • +
+

Parsing JSON into a JavaScript object is supported through the +JSON.parse() function in JavaScript, or through various libraries that +will perform the parsing of the content into a JavaScript object for +you. Libraries for parsing and generating JSON are available in many +languages, including Perl, Python, Ruby, Erlang and others.

+
+

Warning

+

Care should be taken to ensure that your JSON structures are +valid, invalid structures will cause CouchDB to return an HTTP status code +of 500 (server error).

+
+
+
+

2.4. HTTP Status Codes

+

With the interface to CouchDB working through HTTP, error codes and +statuses are reported using a combination of the HTTP status code +number, and corresponding data in the body of the response data.

+

A list of the error codes returned by CouchDB, and generic descriptions +of the related errors are provided below. The meaning of different +status codes for specific request types are provided in the +corresponding API call reference.

+
    +
  • 200 - OK

    +

    Request completed successfully.

    +
  • +
  • 201 - Created

    +

    Document created successfully.

    +
  • +
  • 202 - Accepted

    +

    Request has been accepted, but the corresponding operation may not +have completed. This is used for background operations, such as +database compaction.

    +
  • +
  • 304 - Not Modified

    +

    The additional content requested has not been modified. This is used +with the ETag system to identify the version of information returned.

    +
  • +
  • 400 - Bad Request

    +

    Bad request structure. The error can indicate an error with the +request URL, path or headers. Differences in the supplied MD5 hash +and content also trigger this error, as this may indicate message +corruption.

    +
  • +
  • 401 - Unauthorized

    +

    The item requested was not available using the supplied +authorization, or authorization was not supplied.

    +
  • +
  • 403 - Forbidden

    +

    The requested item or operation is forbidden.

    +
  • +
  • 404 - Not Found

    +

    The requested content could not be found. The content will include +further information, as a JSON object, if available. The structure +will contain two keys, error and reason. For example:

    +
    {"error":"not_found","reason":"no_db_file"}
    +
    +
    +
  • +
  • 405 - Resource Not Allowed

    +

    A request was made using an invalid HTTP request type for the URL +requested. For example, you have requested a PUT when a POST +is required. Errors of this type can also triggered by invalid URL +strings.

    +
  • +
  • 406 - Not Acceptable

    +

    The requested content type is not supported by the server.

    +
  • +
  • 409 - Conflict

    +

    Request resulted in an update conflict.

    +
  • +
  • 412 - Precondition Failed

    +

    The request headers from the client and the capabilities of the +server do not match.

    +
  • +
  • 415 - Bad Content Type

    +

    The content types supported, and the content type of the information +being requested or submitted indicate that the content type is not +supported.

    +
  • +
  • 416 - Requested Range Not Satisfiable

    +

    The range specified in the request header cannot be satisfied by the +server.

    +
  • +
  • 417 - Expectation Failed

    +

    When sending documents in bulk, the bulk load operation failed.

    +
  • +
  • 500 - Internal Server Error

    +

    The request was invalid, either because the supplied JSON was +invalid, or invalid information was supplied as part of the request.

    +
  • +
+
+
+

2.5. HTTP Range Requests

+

HTTP allows you to specify byte ranges for requests. This allows the +implementation of resumable downloads and skippable audio and video +streams alike. This is available for all attachments inside CouchDB.

+

This is just a real quick run through how this looks under the hood. +Usually, you will have larger binary files to serve from CouchDB, like +MP3s and videos, but to make things a little more obvious, I use a text +file here (Note that I use the application/octet-stream Content-Type +instead of text/plain).

+
shell> cat file.txt
+My hovercraft is full of eels!
+
+
+

Now let’s store this text file as an attachment in CouchDB. First, we +create a database:

+
shell> curl -X PUT http://127.0.0.1:5984/test
+{"ok":true}
+
+
+

Then we create a new document and the file attachment in one go:

+
shell> curl -X PUT http://127.0.0.1:5984/test/doc/file.txt \
+            -H "Content-Type: application/octet-stream" -d@file.txt
+{"ok":true,"id":"doc","rev":"1-287a28fa680ae0c7fb4729bf0c6e0cf2"}
+
+
+

Now we can request the whole file easily:

+
shell> curl -X GET http://127.0.0.1:5984/test/doc/file.txt
+My hovercraft is full of eels!
+
+
+

But say we only want the first 13 bytes:

+
shell> curl -X GET http://127.0.0.1:5984/test/doc/file.txt \
+            -H "Range: bytes=0-12"
+My hovercraft
+
+
+

HTTP supports many ways to specify single and even multiple byte +ranges. Read all about it in RFC 2616.

+
+

Note

+

Databases that have been created with CouchDB 1.0.2 or earlier will +support range requests in 1.4, but they are using a less-optimal +algorithm. If you plan to make heavy use of this feature, make sure +to compact your database with CouchDB 1.4 to take advantage of a +better algorithm to find byte ranges.

+
+
+
+ + +
+
+
+
+
+ + + + + +

Table Of Contents

+ + +

Previous topic

+

1. Introduction

+

Next topic

+

3. Configuration

+ + +

Utilities

+ + + + +

More Help

+ + +
+
+
+
+ + + + \ No newline at end of file diff -Nru couchdb-1.2.0/share/doc/build/html/changelog.html couchdb-1.4.0~rc.1/share/doc/build/html/changelog.html --- couchdb-1.2.0/share/doc/build/html/changelog.html 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/changelog.html 2013-08-23 10:58:54.000000000 -0400 @@ -0,0 +1,2043 @@ + + + + + + + + + + 13. Release History — Apache CouchDB 1.4 Manual + + + + + + + + + + + + + + + +
+
+
+
+ +
+

13. Release History

+ +
+

13.1. 1.4.x Branch

+ +
+

13.1.1. Upgrade Notes

+

We now support Erlang/OTP R16B and R16B01; the minimum required version is R14B.

+

User document role values must now be strings. Other types of values will be +refused when saving the user document.

+
+
+

13.1.2. Version 1.4.0

+ +
+
+
+

13.2. 1.3.x Branch

+ +
+

13.2.1. Upgrade Notes

+

You can upgrade your existing CouchDB 1.0.x installation to 1.3.0 +without any specific steps or migration. When you run CouchDB, the +existing data and index files will be opened and used as normal.

+

The first time you run a compaction routine on your database within 1.3.0, +the data structure and indexes will be updated to the new version of the +CouchDB database format that can only be read by CouchDB 1.3.0 and later. +This step is not reversible. Once the data files have been updated and +migrated to the new version the data files will no longer work with a +CouchDB 1.0.x release.

+
+

Warning

+

If you want to retain support for opening the data files in +CouchDB 1.0.x you must back up your data files before performing the +upgrade and compaction process.

+
+
+
+

13.2.2. Version 1.3.1

+
+

13.2.2.1. Replicator

+ +
+
+

13.2.2.2. Log System

+
    +
  • Don’t log about missing .compact files. #06f1a8dc
  • +
  • COUCHDB-1794: Fix bug in WARN level logging from 1.3.0.
  • +
+
+
+

13.2.2.3. View Server

+ +
+
+

13.2.2.4. Miscellaneous

+
    +
  • Improve documentation: better structure, improve language, less duplication.
  • +
  • COUCHDB-1784: Improvements to test suite and VPATH build system. +#01afaa4f
  • +
+
+
+
+

13.2.3. Version 1.3.0

+
+

13.2.3.1. Database core

+ +
+
+

13.2.3.2. Documentation

+ +
+
+

13.2.3.3. Futon

+ +
+
+

13.2.3.4. HTTP Interface

+ +
+
+

13.2.3.5. Log System

+
    +
  • COUCHDB-1380: Minor fixes for logrotate support.
  • +
  • Improve file I/O error logging and handling, #4b6475da
  • +
  • Module Level Logging, #b58f069167
  • +
  • Log 5xx responses at error level, #e896b0b7
  • +
  • Log problems opening database at ERROR level except for auto-created +system dbs, #41667642f7
  • +
+
+
+

13.2.3.6. Replicator

+
    +
  • COUCHDB-1557: Upgrade some code to use BIFs, bringing good improvements for +replication.
  • +
  • COUCHDB-1363: Fix a rare race condition in the changes feed: +if a quick burst of changes happens while replication is starting, the +replication can go stale. #573a7bb9
  • +
  • COUCHDB-1323: Replicator now acts as standalone application. +#f913ca6e
  • +
  • COUCHDB-1259: Stabilize replication id, #c6252d6d7f
  • +
  • COUCHDB-1248: HTTP 500 error now doesn’t occur when replicating with +?doc_ids=null. #bea76dbf
  • +
+
+
+

13.2.3.7. Security

+
    +
  • COUCHDB-1060: Passwords are now hashed using the PBKDF2 algorithm with a +configurable work factor. #7d418134
  • +
+
+
+

13.2.3.8. Source Repository

+
    +
  • The source repository was migrated from SVN to Git.
  • +
+
+
+

13.2.3.9. Storage System

+
    +
  • Fixed unnecessary conflict when deleting and creating a +document in the same batch.
  • +
+
+
+

13.2.3.10. Test Suite

+
    +
  • COUCHDB-1563: Ensures urlPrefix is set in all ajax requests. +#07a6af222
  • +
  • COUCHDB-1389: Improved tracebacks printed by the JS CLI tests.
  • +
  • COUCHDB-1339: Use shell trap to catch dying beam processes during test runs. +#2921c78
  • +
  • COUCHDB-1338: Start CouchDB with port=0. While CouchDB might be already +running on the default port 5984, port number 0 let the TCP stack figure out a +free port to run. #127cbe3
  • +
  • COUCHDB-1321: Moved the JS test suite to the CLI.
  • +
  • Improved the reliability of a number of tests.
  • +
  • Fix race condition for test running on faster hardware.
  • +
+
+
+

13.2.3.11. URL Rewriter & Vhosts

+ +
+
+

13.2.3.12. UUID Algorithms

+ +
+
+

13.2.3.13. Query and View Server

+
    +
  • COUCHDB-1491: Cleanup view tables. #c37204b7
  • +
  • COUCHDB-1483: Update handlers requires valid doc ids. #72ea7e38
  • +
  • COUCHDB-1445: CouchDB tries no more to delete view file if it couldn’t open +it, even if the error is emfile.
  • +
  • COUCHDB-1444: Fix missed_named_view error that occurs on existing design +documents and views. #b59ac98b
  • +
  • COUCHDB-1372: _stats builtin reduce function no longer produces error for +empty view result.
  • +
  • COUCHDB-410: More graceful error handling for JavaScript validate_doc_update +functions.
  • +
  • COUCHDB-111: Improve the errors reported by the javascript view server +to provide a more friendly error report when something goes wrong. +#0c619ed
  • +
  • Deprecate E4X support, #cdfdda2314
  • +
+
+
+

13.2.3.14. Windows

+ +
+
+
+
+

13.3. 1.2.x Branch

+ +
+

13.3.1. Upgrade Notes

+
+

Warning

+

This version drops support for the database format that was introduced in +version 0.9.0. Compact your older databases (that have not been compacted +for a long time) before upgrading, or they will become inaccessible.

+
+
+

13.3.1.1. Security changes

+

The interface to the _users and _replicator databases have been +changed so that non-administrator users can see less information:

+
    +
  • In the _users database:
      +
    • User documents can now only be read by the respective users, as well as +administrators. Other users cannot read these documents.
    • +
    • Views can only be defined and queried by administrator users.
    • +
    • The _changes feed can only be queried by administrator users.
    • +
    +
  • +
  • In the _replicator database:
      +
    • Documents now have a forced owner field that corresponds to the +authenticated user that created them.
    • +
    • Non-owner users will not see confidential information like passwords or +OAuth tokens in replication documents; they can still see the other +contents of those documents. Administrators can see everything.
    • +
    • Views can only be defined and queried by administrators.
    • +
    +
  • +
+
+
+

13.3.1.2. Database Compression

+

The new optional (but enabled by default) compression of disk files requires +an upgrade of the on-disk format (5 -> 6) which occurs on creation for new +databases and views, and on compaction for existing files. This format is not +supported in previous releases, so rollback would require replication to the +previous CouchDB release or restoring from backup.

+

Compression can be disabled by setting compression = none in your +local.ini [couchdb] section, but the on-disk format will still be +upgraded.

+
+
+
+

13.3.2. Version 1.2.1

+
+

13.3.2.1. Security

+
    +
  • Fixed CVE-2012-5641: Apache CouchDB Information disclosure via unescaped +backslashes in URLs on Windows
  • +
  • Fixed CVE-2012-5649: Apache CouchDB JSONP arbitrary code execution with Adobe +Flash
  • +
  • Fixed CVE-2012-5650: Apache CouchDB DOM based Cross-Site Scripting via Futon +UI
  • +
+
+
+

13.3.2.2. HTTP Interface

+
    +
  • No longer rewrites the X-CouchDB-Requested-Path during recursive +calls to the rewriter.
  • +
  • Limit recursion depth in the URL rewriter. Defaults to a maximum +of 100 invocations but is configurable.
  • +
+
+
+

13.3.2.3. Build System

+
    +
  • Fix couchdb start script.
  • +
  • Win: fix linker invocations.
  • +
+
+
+

13.3.2.4. Futon

+
    +
  • Disable buttons that aren’t available for the logged-in user.
  • +
+
+
+

13.3.2.5. Replication

+
    +
  • Fix potential timeouts.
  • +
+
+
+

13.3.2.6. View System

+
    +
  • Change use of signals to avoid broken view groups.
  • +
+
+
+
+

13.3.3. Version 1.2.0

+
+

13.3.3.1. Authentication

+
    +
  • Fix use of OAuth with VHosts and URL rewriting.
  • +
  • OAuth secrets can now be stored in the users system database +as an alternative to key value pairs in the .ini configuration. +By default this is disabled (secrets are stored in the .ini) +but can be enabled via the .ini configuration key use_users_db +in the couch_httpd_oauth section.
  • +
  • Documents in the _users database are no longer publicly +readable.
  • +
  • Confidential information in the _replication database is no +longer publicly readable.
  • +
  • Password hashes are now calculated by CouchDB. Clients are no +longer required to do this manually.
  • +
  • Cookies used for authentication can be made persistent by enabling +the .ini configuration key allow_persistent_cookies in the +couch_httpd_auth section.
  • +
+
+
+

13.3.3.2. Build System

+
    +
  • cURL is no longer required to build CouchDB as it is only +used by the command line JS test runner. If cURL is available +when building CouchJS you can enable the HTTP bindings by +passing -H on the command line.
  • +
  • Temporarily made make check pass with R15B. A more thorough +fix is in the works (COUCHDB-1424).
  • +
  • Fixed --with-js-include and --with-js-lib options.
  • +
  • Added --with-js-lib-name option.
  • +
+
+
+

13.3.3.3. Futon

+
    +
  • The Status screen (active tasks) now displays two new task status +fields: Started on and Updated on.
  • +
  • Futon remembers view code every time it is saved, allowing to save an +edit that amounts to a revert.
  • +
+
+
+

13.3.3.4. HTTP Interface

+
    +
  • Added a native JSON parser.
  • +
  • The _active_tasks API now offers more granular fields. Each +task type is now able to expose different properties.
  • +
  • Added built-in changes feed filter _view.
  • +
  • Fixes to the _changes feed heartbeat option which caused +heartbeats to be missed when used with a filter. This caused +timeouts of continuous pull replications with a filter.
  • +
  • Properly restart the SSL socket on configuration changes.
  • +
+
+
+

13.3.3.5. Replicator

+
    +
  • A new replicator implementation. It offers more performance and +configuration options.
  • +
  • Passing non-string values to query_params is now a 400 bad +request. This is to reduce the surprise that all parameters +are converted to strings internally.
  • +
  • Added optional field since_seq to replication objects/documents. +It allows to bootstrap a replication from a specific source sequence +number.
  • +
  • Simpler replication cancellation. In addition to the current method, +replications can now be canceled by specifying the replication ID +instead of the original replication object/document.
  • +
+
+
+

13.3.3.6. Storage System

+
    +
  • Added optional database and view index file compression (using Google’s +snappy or zlib’s deflate). This feature is enabled by default, but it +can be disabled by adapting local.ini accordingly. The on-disk format +is upgraded on compaction and new DB/view creation to support this.
  • +
  • Several performance improvements, most notably regarding database writes +and view indexing.
  • +
  • Computation of the size of the latest MVCC snapshot data and all its +supporting metadata, both for database and view index files. This +information is exposed as the data_size attribute in the database and +view group information URIs.
  • +
  • The size of the buffers used for database and view compaction is now +configurable.
  • +
  • Added support for automatic database and view compaction. This feature +is disabled by default, but it can be enabled via the .ini configuration.
  • +
  • Performance improvements for the built-in changes feed filters _doc_ids +and _design.
  • +
+
+
+

13.3.3.7. View Server

+
    +
  • Add CoffeeScript (http://coffeescript.org/) as a first class view server +language.
  • +
  • Fixed old index file descriptor leaks after a view cleanup.
  • +
  • The requested_path property keeps the pre-rewrite path even when no VHost +configuration is matched.
  • +
  • Fixed incorrect reduce query results when using pagination parameters.
  • +
  • Made icu_driver work with Erlang R15B and later.
  • +
+
+
+

13.3.3.8. OAuth

+
    +
  • Updated bundled erlang_oauth library to the latest version.
  • +
+
+
+
+
+

13.4. 1.1.x Branch

+ +
+

13.4.1. Version 1.1.2

+
+

13.4.1.1. Security

+
    +
  • Fixed CVE-2012-5641: Apache CouchDB Information disclosure via unescaped +backslashes in URLs on Windows.
  • +
  • Fixed CVE-2012-5649: Apache CouchDB JSONP arbitrary code execution with +Adobe Flash.
  • +
  • Fixed CVE-2012-5650: Apache CouchDB DOM based Cross-Site Scripting via Futon +UI.
  • +
+
+
+

13.4.1.2. HTTP Interface

+
    +
  • ETag of attachment changes only when the attachment changes, not +the document.
  • +
  • Fix retrieval of headers larger than 4k.
  • +
  • Allow OPTIONS HTTP method for list requests.
  • +
  • Don’t attempt to encode invalid json.
  • +
+
+
+

13.4.1.3. Replicator

+
    +
  • Fix pull replication of documents with many revisions.
  • +
  • Fix replication from an HTTP source to an HTTP target.
  • +
+
+
+

13.4.1.4. View Server

+
    +
  • Avoid invalidating view indexes when running out of file descriptors.
  • +
+
+
+

13.4.1.5. Log System

+
    +
  • Improvements to log messages for file-related errors.
  • +
+
+
+

13.4.1.6. Build System

+
    +
  • Don’t ln the couchjs install target on Windows
  • +
  • Remove ICU version dependency on Windows.
  • +
  • Improve SpiderMonkey version detection.
  • +
+
+
+
+

13.4.2. Version 1.1.1

+
    +
  • Support SpiderMonkey 1.8.5
  • +
  • Add configurable maximum to the number of bytes returned by _log.
  • +
  • Allow CommonJS modules to be an empty string.
  • +
  • Bump minimum Erlang version to R13B02.
  • +
  • Do not run deleted validate_doc_update functions.
  • +
  • ETags for views include current sequence if include_docs=true.
  • +
  • Fix bug where duplicates can appear in _changes feed.
  • +
  • Fix bug where update handlers break after conflict resolution.
  • +
  • Fix bug with _replicator where include “filter” could crash couch.
  • +
  • Fix crashes when compacting large views.
  • +
  • Fix file descriptor leak in _log
  • +
  • Fix missing revisions in _changes?style=all_docs.
  • +
  • Improve handling of compaction at max_dbs_open limit.
  • +
  • JSONP responses now send “text/javascript” for Content-Type.
  • +
  • Link to ICU 4.2 on Windows.
  • +
  • Permit forward slashes in path to update functions.
  • +
  • Reap couchjs processes that hit reduce_overflow error.
  • +
  • Status code can be specified in update handlers.
  • +
  • Support provides() in show functions.
  • +
  • _view_cleanup when ddoc has no views now removes all index files.
  • +
  • max_replication_retry_count now supports “infinity”.
  • +
  • Fix replication crash when source database has a document with empty ID.
  • +
  • Fix deadlock when assigning couchjs processes to serve requests.
  • +
  • Fixes to the document multipart PUT API.
  • +
  • Fixes regarding file descriptor leaks for databases with views.
  • +
+
+
+

13.4.3. Version 1.1.0

+
+

Note

+

All CHANGES for 1.0.2 and 1.0.3 also apply to 1.1.0.

+
+
+

13.4.3.1. Externals

+
    +
  • Added OS Process module to manage daemons outside of CouchDB.
  • +
  • Added HTTP Proxy handler for more scalable externals.
  • +
+
+
+

13.4.3.2. Futon

+
    +
  • Added a “change password”-feature to Futon.
  • +
+
+
+

13.4.3.3. HTTP Interface

+
    +
  • Native SSL support.
  • +
  • Added support for HTTP range requests for attachments.
  • +
  • Added built-in filters for _changes: _doc_ids and _design.
  • +
  • Added configuration option for TCP_NODELAY aka “Nagle”.
  • +
  • Allow POSTing arguments to _changes.
  • +
  • Allow keys parameter for GET requests to views.
  • +
  • Allow wildcards in vhosts definitions.
  • +
  • More granular ETag support for views.
  • +
  • More flexible URL rewriter.
  • +
  • Added support for recognizing “Q values” and media parameters in +HTTP Accept headers.
  • +
  • Validate doc ids that come from a PUT to a URL.
  • +
+
+
+

13.4.3.4. Replicator

+
    +
  • Added _replicator database to manage replications.
  • +
  • Fixed issues when an endpoint is a remote database accessible via SSL.
  • +
  • Added support for continuous by-doc-IDs replication.
  • +
  • Fix issue where revision info was omitted when replicating attachments.
  • +
  • Integrity of attachment replication is now verified by MD5.
  • +
+
+
+

13.4.3.5. Storage System

+
    +
  • Multiple micro-optimizations when reading data.
  • +
+
+
+

13.4.3.6. URL Rewriter & Vhosts

+
    +
  • Fix for variable substitution
  • +
+
+
+

13.4.3.7. View Server

+
    +
  • Added CommonJS support to map functions.
  • +
  • Added stale=update_after query option that triggers a view update after +returning a stale=ok response.
  • +
  • Warn about empty result caused by startkey and endkey limiting.
  • +
  • Built-in reduce function _sum now accepts lists of integers as input.
  • +
  • Added view query aliases start_key, end_key, start_key_doc_id and +end_key_doc_id.
  • +
+
+
+
+
+

13.5. 1.0.x Branch

+ +
+

13.5.1. Version 1.0.4

+
+

13.5.1.1. Security

+
    +
  • Fixed CVE-2012-5641: Apache CouchDB Information disclosure via unescaped +backslashes in URLs on Windows.
  • +
  • Fixed CVE-2012-5649: Apache CouchDB JSONP arbitrary code execution with +Adobe Flash.
  • +
  • Fixed CVE-2012-5650: Apache CouchDB DOM based Cross-Site Scripting via Futon +UI.
  • +
+
+
+

13.5.1.2. Log System

+
    +
  • Fix file descriptor leak in _log.
  • +
+
+
+

13.5.1.3. HTTP Interface

+
    +
  • Fix missing revisions in _changes?style=all_docs.
  • +
  • Fix validation of attachment names.
  • +
+
+
+

13.5.1.4. View System

+
    +
  • Avoid invalidating view indexes when running out of file descriptors.
  • +
+
+
+

13.5.1.5. Replicator

+
    +
  • Fix a race condition where replications can go stale.
  • +
+
+
+
+

13.5.2. Version 1.0.3

+
+

13.5.2.1. General

+
    +
  • Fixed compatibility issues with Erlang R14B02.
  • +
+
+
+

13.5.2.2. Etap Test Suite

+
    +
  • Etap tests no longer require use of port 5984. They now use a randomly +selected port so they won’t clash with a running CouchDB.
  • +
+
+
+

13.5.2.3. Futon

+
    +
  • Made compatible with jQuery 1.5.x.
  • +
+
+
+

13.5.2.4. HTTP Interface

+
    +
  • Fix bug that allows invalid UTF-8 after valid escapes.
  • +
  • The query parameter include_docs now honors the parameter conflicts. +This applies to queries against map views, _all_docs and _changes.
  • +
  • Added support for inclusive_end with reduce views.
  • +
+
+
+

13.5.2.5. Replicator

+
    +
  • Enabled replication over IPv6.
  • +
  • Fixed crashes in continuous and filtered changes feeds.
  • +
  • Fixed error when restarting replications in OTP R14B02.
  • +
  • Upgrade ibrowse to version 2.2.0.
  • +
  • Fixed bug when using a filter and a limit of 1.
  • +
+
+
+

13.5.2.6. Security

+
    +
  • Fixed OAuth signature computation in OTP R14B02.
  • +
  • Handle passwords with : in them.
  • +
+
+
+

13.5.2.7. Storage System

+
    +
  • More performant queries against _changes and _all_docs when using the +include_docs parameter.
  • +
+
+
+

13.5.2.8. Windows

+
    +
  • Windows builds now require ICU >= 4.4.0 and Erlang >= R14B03. See +COUCHDB-1152, and COUCHDB-963 + OTP-9139 for more information.
  • +
+
+
+
+

13.5.3. Version 1.0.2

+
+

13.5.3.1. Security

+
    +
  • Fixed CVE-2010-3854: Apache CouchDB Cross Site Scripting Issue.
  • +
+
+
+

13.5.3.2. Futon

+
    +
  • Make test suite work with Safari and Chrome.
  • +
  • Fixed animated progress spinner.
  • +
  • Fix raw view document link due to overzealous URI encoding.
  • +
  • Spell javascript correctly in loadScript(uri).
  • +
+
+
+

13.5.3.3. HTTP Interface

+
    +
  • Allow reduce=false parameter in map-only views.
  • +
  • Fix parsing of Accept headers.
  • +
  • Fix for multipart GET APIs when an attachment was created during a +local-local replication. See COUCHDB-1022 for details.
  • +
+
+
+

13.5.3.4. Log System

+
    +
  • Reduce lengthy stack traces.
  • +
  • Allow logging of native <xml> types.
  • +
+
+
+

13.5.3.5. Replicator

+
    +
  • Updated ibrowse library to 2.1.2 fixing numerous replication issues.
  • +
  • Make sure that the replicator respects HTTP settings defined in the config.
  • +
  • Fix error when the ibrowse connection closes unexpectedly.
  • +
  • Fix authenticated replication (with HTTP basic auth) of design documents +with attachments.
  • +
  • Various fixes to make replication more resilient for edge-cases.
  • +
+
+
+

13.5.3.6. Storage System

+
    +
  • Fix leaking file handles after compacting databases and views.
  • +
  • Fix databases forgetting their validation function after compaction.
  • +
  • Fix occasional timeout errors after successfully compacting large databases.
  • +
  • Fix occasional error when writing to a database that has just been compacted.
  • +
  • Fix occasional timeout errors on systems with slow or heavily loaded IO.
  • +
  • Fix for OOME when compactions include documents with many conflicts.
  • +
  • Fix for missing attachment compression when MIME types included parameters.
  • +
  • Preserve purge metadata during compaction to avoid spurious view rebuilds.
  • +
  • Fix spurious conflicts introduced when uploading an attachment after +a doc has been in a conflict. See COUCHDB-902 for details.
  • +
  • Fix for frequently edited documents in multi-master deployments being +duplicated in _changes and _all_docs. See COUCHDB-968 for details on how +to repair.
  • +
  • Significantly higher read and write throughput against database and +view index files.
  • +
+
+
+

13.5.3.7. View Server

+
    +
  • Don’t trigger view updates when requesting _design/doc/_info.
  • +
  • Fix for circular references in CommonJS requires.
  • +
  • Made isArray() function available to functions executed in the query server.
  • +
  • Documents are now sealed before being passed to map functions.
  • +
  • Force view compaction failure when duplicated document data exists. When +this error is seen in the logs users should rebuild their views from +scratch to fix the issue. See COUCHDB-999 for details.
  • +
+
+
+
+

13.5.4. Version 1.0.1

+
+

13.5.4.1. Security

+
    +
  • Fixed CVE-2010-2234: Apache CouchDB Cross Site Request Forgery Attack.
  • +
+
+
+

13.5.4.2. Authentication

+
    +
  • +
    Enable basic-auth popup when required to access the server, to prevent
    +

    people from getting locked out.

    +
    +
    +
  • +
+
+
+

13.5.4.3. Build and System Integration

+
    +
  • Included additional source files for distribution.
  • +
+
+
+

13.5.4.4. Futon

+
    +
  • User interface element for querying stale (cached) views.
  • +
+
+
+

13.5.4.5. HTTP Interface

+
    +
  • Expose committed_update_seq for monitoring purposes.
  • +
  • Show fields saved along with _deleted=true. Allows for auditing of deletes.
  • +
  • More robust Accept-header detection.
  • +
+
+
+

13.5.4.6. Replicator

+
    +
  • Added support for replication via an HTTP/HTTPS proxy.
  • +
  • Fix pull replication of attachments from 0.11 to 1.0.x.
  • +
  • Make the _changes feed work with non-integer seqnums.
  • +
+
+
+

13.5.4.7. Storage System

+ +
+
+
+

13.5.5. Version 1.0.0

+
+

13.5.5.1. Security

+
    +
  • Added authentication caching, to avoid repeated opening and closing of the +users database for each request requiring authentication.
  • +
+
+
+

13.5.5.2. Storage System

+
    +
  • Small optimization for reordering result lists.
  • +
  • More efficient header commits.
  • +
  • Use O_APPEND to save lseeks.
  • +
  • Faster implementation of pread_iolist(). Further improves performance on +concurrent reads.
  • +
+
+
+

13.5.5.3. View Server

+
    +
  • Faster default view collation.
  • +
  • Added option to include update_seq in view responses.
  • +
+
+
+
+
+

13.6. 0.11.x Branch

+ +
+

13.6.1. Version 0.11.2

+
+

13.6.1.1. Security

+
    +
  • Fixed CVE-2010-2234: Apache CouchDB Cross Site Request Forgery Attack.
  • +
  • Avoid potential DOS attack by guarding all creation of atoms.
  • +
+
+
+

13.6.1.2. Authentication

+
    +
  • User documents can now be deleted by admins or the user.
  • +
+
+
+

13.6.1.3. Futon

+
    +
  • Add some Futon files that were missing from the Makefile.
  • +
+
+
+

13.6.1.4. HTTP Interface

+
    +
  • Better error messages on invalid URL requests.
  • +
+
+
+

13.6.1.5. Replicator

+
    +
  • +
    Fix bug when pushing design docs by non-admins, which was hanging the
    +

    replicator for no good reason.

    +
    +
    +
  • +
  • +
    Fix bug when pulling design documents from a source that requires
    +

    basic-auth.

    +
    +
    +
  • +
+
+
+
+

13.6.2. Version 0.11.1

+
+

13.6.2.1. Build and System Integration

+
    +
  • Output of couchdb --help has been improved.
  • +
  • Fixed compatibility with the Erlang R14 series.
  • +
  • Fixed warnings on Linux builds.
  • +
  • Fixed build error when aclocal needs to be called during the build.
  • +
  • Require ICU 4.3.1.
  • +
  • Fixed compatibility with Solaris.
  • +
+
+
+

13.6.2.2. Configuration System

+
    +
  • Fixed timeout with large .ini files.
  • +
+
+
+

13.6.2.3. Futon

+
    +
  • Use “expando links” for over-long document values in Futon.
  • +
  • Added continuous replication option.
  • +
  • Added option to replicating test results anonymously to a community +CouchDB instance.
  • +
  • Allow creation and deletion of config entries.
  • +
  • Fixed display issues with doc ids that have escaped characters.
  • +
  • Fixed various UI issues.
  • +
+
+
+

13.6.2.4. HTTP Interface

+
    +
  • Mask passwords in active tasks and logging.
  • +
  • Update mochijson2 to allow output of BigNums not in float form.
  • +
  • Added support for X-HTTP-METHOD-OVERRIDE.
  • +
  • Better error message for database names.
  • +
  • Disable jsonp by default.
  • +
  • Accept gzip encoded standalone attachments.
  • +
  • Made max_concurrent_connections configurable.
  • +
  • Made changes API more robust.
  • +
  • Send newly generated document rev to callers of an update function.
  • +
+
+
+

13.6.2.5. JavaScript Clients

+
    +
  • Added tests for couch.js and jquery.couch.js
  • +
  • Added changes handler to jquery.couch.js.
  • +
  • Added cache busting to jquery.couch.js if the user agent is msie.
  • +
  • Added support for multi-document-fetch (via _all_docs) to jquery.couch.js.
  • +
  • Added attachment versioning to jquery.couch.js.
  • +
  • Added option to control ensure_full_commit to jquery.couch.js.
  • +
  • Added list functionality to jquery.couch.js.
  • +
  • Fixed issues where bulkSave() wasn’t sending a POST body.
  • +
+
+
+

13.6.2.6. Log System

+
    +
  • Log HEAD requests as HEAD, not GET.
  • +
  • Keep massive JSON blobs out of the error log.
  • +
  • Fixed a timeout issue.
  • +
+
+
+

13.6.2.7. Replication System

+
    +
  • Refactored various internal APIs related to attachment streaming.
  • +
  • Fixed hanging replication.
  • +
  • Fixed keepalive issue.
  • +
+
+
+

13.6.2.8. Security

+
    +
  • Added authentication redirect URL to log in clients.
  • +
  • Fixed query parameter encoding issue in oauth.js.
  • +
  • Made authentication timeout configurable.
  • +
  • Temporary views are now admin-only resources.
  • +
+
+
+

13.6.2.9. Storage System

+
    +
  • Don’t require a revpos for attachment stubs.
  • +
  • Added checking to ensure when a revpos is sent with an attachment stub, +it’s correct.
  • +
  • Make file deletions async to avoid pauses during compaction and db +deletion.
  • +
  • Fixed for wrong offset when writing headers and converting them to blocks, +only triggered when header is larger than 4k.
  • +
  • Preserve _revs_limit and instance_start_time after compaction.
  • +
+
+
+

13.6.2.10. Test Suite

+
    +
  • Made the test suite overall more reliable.
  • +
+
+
+

13.6.2.11. View Server

+
    +
  • Provide a UUID to update functions (and all other functions) that they can +use to create new docs.
  • +
  • Upgrade CommonJS modules support to 1.1.1.
  • +
  • Fixed erlang filter funs and normalize filter fun API.
  • +
  • Fixed hang in view shutdown.
  • +
+
+
+

13.6.2.12. URL Rewriter & Vhosts

+
    +
  • Allow more complex keys in rewriter.
  • +
  • Allow global rewrites so system defaults are available in vhosts.
  • +
  • Allow isolation of databases with vhosts.
  • +
  • Fix issue with passing variables to query parameters.
  • +
+
+
+
+

13.6.3. Version 0.11.0

+
+

13.6.3.1. Build and System Integration

+
    +
  • Updated and improved source documentation.
  • +
  • Fixed distribution preparation for building on Mac OS X.
  • +
  • Added support for building a Windows installer as part of ‘make dist’.
  • +
  • Bug fix for building couch.app’s module list.
  • +
  • ETap tests are now run during make distcheck. This included a number of +updates to the build system to properly support VPATH builds.
  • +
  • Gavin McDonald setup a build-bot instance. More info can be found at +http://ci.apache.org/buildbot.html
  • +
+
+
+

13.6.3.2. Futon

+
    +
  • Added a button for view compaction.
  • +
  • JSON strings are now displayed as-is in the document view, without the +escaping of new-lines and quotes. That dramatically improves readability of +multi-line strings.
  • +
  • Same goes for editing of JSON string values. When a change to a field value is +submitted, and the value is not valid JSON it is assumed to be a string. This +improves editing of multi-line strings a lot.
  • +
  • Hitting tab in textareas no longer moves focus to the next form field, but +simply inserts a tab character at the current caret position.
  • +
  • Fixed some font declarations.
  • +
+
+
+

13.6.3.3. HTTP Interface

+
    +
  • Provide Content-MD5 header support for attachments.
  • +
  • Added URL Rewriter handler.
  • +
  • Added virtual host handling.
  • +
+
+
+

13.6.3.4. Replication

+
    +
  • Added option to implicitly create replication target databases.
  • +
  • Avoid leaking file descriptors on automatic replication restarts.
  • +
  • Added option to replicate a list of documents by id.
  • +
  • Allow continuous replication to be cancelled.
  • +
+
+
+

13.6.3.5. Runtime Statistics

+
    +
  • Statistics are now calculated for a moving window instead of non-overlapping +timeframes.
  • +
  • Fixed a problem with statistics timers and system sleep.
  • +
  • Moved statistic names to a term file in the priv directory.
  • +
+
+
+

13.6.3.6. Security

+
    +
  • Fixed CVE-2010-0009: Apache CouchDB Timing Attack Vulnerability.
  • +
  • Added default cookie-authentication and users database.
  • +
  • Added Futon user interface for user signup and login.
  • +
  • Added per-database reader access control lists.
  • +
  • Added per-database security object for configuration data in validation +functions.
  • +
  • Added proxy authentication handler
  • +
+
+
+

13.6.3.7. Storage System

+
    +
  • Adds batching of multiple updating requests, to improve throughput with many +writers. Removed the now redundant couch_batch_save module.
  • +
  • Adds configurable compression of attachments.
  • +
+
+
+

13.6.3.8. View Server

+
    +
  • Added optional ‘raw’ binary collation for faster view builds where Unicode +collation is not important.
  • +
  • Improved view index build time by reducing ICU collation callouts.
  • +
  • Improved view information objects.
  • +
  • Bug fix for partial updates during view builds.
  • +
  • Move query server to a design-doc based protocol.
  • +
  • Use json2.js for JSON serialization for compatibility with native JSON.
  • +
  • Major refactoring of couchjs to lay the groundwork for disabling cURL +support. The new HTTP interaction acts like a synchronous XHR. Example usage +of the new system is in the JavaScript CLI test runner.
  • +
+
+
+
+
+

13.7. 0.10.x Branch

+ +
+

13.7.1. Version 0.10.1

+
+

13.7.1.1. Build and System Integration

+
    +
  • Test suite now works with the distcheck target.
  • +
+
+
+

13.7.1.2. Replicator

+
    +
  • Stability enhancements regarding redirects, timeouts, OAuth.
  • +
+
+
+

13.7.1.3. Query Server

+
    +
  • Avoid process leaks
  • +
  • Allow list and view to span languages
  • +
+
+
+

13.7.1.4. Stats

+
    +
  • Eliminate new process flood on system wake
  • +
+
+
+
+

13.7.2. Version 0.10.0

+
+

13.7.2.1. Build and System Integration

+
    +
  • Changed couchdb script configuration options.
  • +
  • Added default.d and local.d configuration directories to load sequence.
  • +
+
+
+

13.7.2.2. HTTP Interface

+
    +
  • Added optional cookie-based authentication handler.
  • +
  • Added optional two-legged OAuth authentication handler.
  • +
+
+
+

13.7.2.3. Storage Format

+
    +
  • Add move headers with checksums to the end of database files for extra robust +storage and faster storage.
  • +
+
+
+

13.7.2.4. View Server

+
    +
  • Added native Erlang views for high-performance applications.
  • +
+
+
+
+
+

13.8. 0.9.x Branch

+ +
+

13.8.1. Version 0.9.2

+
+

13.8.1.1. Build and System Integration

+
    +
  • Remove branch callbacks to allow building couchjs against newer versions of +Spidermonkey.
  • +
+
+
+

13.8.1.2. Replication

+
    +
  • Fix replication with 0.10 servers initiated by an 0.9 server (COUCHDB-559).
  • +
+
+
+
+

13.8.2. Version 0.9.1

+
+

13.8.2.1. Build and System Integration

+
    +
  • PID file directory is now created by the SysV/BSD daemon scripts.
  • +
  • Fixed the environment variables shown by the configure script.
  • +
  • Fixed the build instructions shown by the configure script.
  • +
  • Updated ownership and permission advice in README for better security.
  • +
+
+
+

13.8.2.2. Configuration and stats system

+
    +
  • Corrected missing configuration file error message.
  • +
  • Fixed incorrect recording of request time.
  • +
+
+
+

13.8.2.3. Database Core

+
    +
  • Document validation for underscore prefixed variables.
  • +
  • Made attachment storage less sparse.
  • +
  • Fixed problems when a database with delayed commits pending is considered +idle, and subject to losing changes when shutdown. (COUCHDB-334)
  • +
+
+
+

13.8.2.4. External Handlers

+
    +
  • Fix POST requests.
  • +
+
+
+

13.8.2.5. Futon

+
    +
  • Redirect when loading a deleted view URI from the cookie.
  • +
+
+
+

13.8.2.6. HTTP Interface

+
    +
  • Attachment requests respect the “rev” query-string parameter.
  • +
+
+
+

13.8.2.7. JavaScript View Server

+
    +
  • Useful JavaScript Error messages.
  • +
+
+
+

13.8.2.8. Replication

+
    +
  • Added support for Unicode characters transmitted as UTF-16 surrogate pairs.
  • +
  • URL-encode attachment names when necessary.
  • +
  • Pull specific revisions of an attachment, instead of just the latest one.
  • +
  • Work around a rare chunk-merging problem in ibrowse.
  • +
  • Work with documents containing Unicode characters outside the Basic +Multilingual Plane.
  • +
+
+
+
+

13.8.3. Version 0.9.0

+
+

13.8.3.1. Build and System Integration

+
    +
  • The couchdb script now supports system chainable configuration files.
  • +
  • The Mac OS X daemon script now redirects STDOUT and STDERR like SysV/BSD.
  • +
  • The build and system integration have been improved for portability.
  • +
  • Added COUCHDB_OPTIONS to etc/default/couchdb file.
  • +
  • Remove COUCHDB_INI_FILE and COUCHDB_PID_FILE from etc/default/couchdb file.
  • +
  • Updated configure.ac to manually link libm for portability.
  • +
  • Updated configure.ac to extended default library paths.
  • +
  • Removed inets configuration files.
  • +
  • Added command line test runner.
  • +
  • Created dev target for make.
  • +
+
+
+

13.8.3.2. Configuration and stats system

+
    +
  • Separate default and local configuration files.
  • +
  • HTTP interface for configuration changes.
  • +
  • Statistics framework with HTTP query API.
  • +
+
+
+

13.8.3.3. Database Core

+
    +
  • Faster B-tree implementation.
  • +
  • Changed internal JSON term format.
  • +
  • Improvements to Erlang VM interactions under heavy load.
  • +
  • User context and administrator role.
  • +
  • Update validations with design document validation functions.
  • +
  • Document purge functionality.
  • +
  • Ref-counting for database file handles.
  • +
+
+
+

13.8.3.4. Design Document Resource Paths

+
    +
  • Added httpd_design_handlers config section.
  • +
  • Moved _view to httpd_design_handlers.
  • +
  • Added ability to render documents as non-JSON content-types with _show and +_list functions, which are also httpd_design_handlers.
  • +
+
+
+

13.8.3.5. Futon Utility Client

+
    +
  • Added pagination to the database listing page.
  • +
  • Implemented attachment uploading from the document page.
  • +
  • Added page that shows the current configuration, and allows modification of +option values.
  • +
  • Added a JSON “source view” for document display.
  • +
  • JSON data in view rows is now syntax highlighted.
  • +
  • Removed the use of an iframe for better integration with browser history and +bookmarking.
  • +
  • Full database listing in the sidebar has been replaced by a short list of +recent databases.
  • +
  • The view editor now allows selection of the view language if there is more +than one configured.
  • +
  • Added links to go to the raw view or document URI.
  • +
  • Added status page to display currently running tasks in CouchDB.
  • +
  • JavaScript test suite split into multiple files.
  • +
  • Pagination for reduce views.
  • +
+
+
+

13.8.3.6. HTTP Interface

+
    +
  • Added client side UUIDs for idempotent document creation
  • +
  • HTTP COPY for documents
  • +
  • Streaming of chunked attachment PUTs to disk
  • +
  • Remove negative count feature
  • +
  • Add include_docs option for view queries
  • +
  • Add multi-key view post for views
  • +
  • Query parameter validation
  • +
  • Use stale=ok to request potentially cached view index
  • +
  • External query handler module for full-text or other indexers.
  • +
  • Etags for attachments, views, shows and lists
  • +
  • Show and list functions for rendering documents and views as developer +controlled content-types.
  • +
  • Attachment names may use slashes to allow uploading of nested directories +(useful for static web hosting).
  • +
  • Option for a view to run over design documents.
  • +
  • Added newline to JSON responses. Closes bike-shed.
  • +
+
+
+

13.8.3.7. Replication

+
    +
  • Using ibrowse.
  • +
  • Checkpoint replications so failures are less expensive.
  • +
  • Automatically retry of failed replications.
  • +
  • Stream attachments in pull-replication.
  • +
+
+
+
+
+

13.9. 0.8.x Branch

+ +
+

13.9.1. Version 0.8.1-incubating

+
+

13.9.1.1. Build and System Integration

+
    +
  • The couchdb script no longer uses awk for configuration checks as this +was causing portability problems.
  • +
  • Updated sudo example in README to use the -i option, this fixes +problems when invoking from a directory the couchdb user cannot access.
  • +
+
+
+

13.9.1.2. Database Core

+
    +
  • Fix for replication problems where the write queues can get backed up if the +writes aren’t happening fast enough to keep up with the reads. For a large +replication, this can exhaust memory and crash, or slow down the machine +dramatically. The fix keeps only one document in the write queue at a time.
  • +
  • Fix for databases sometimes incorrectly reporting that they contain 0 +documents after compaction.
  • +
  • CouchDB now uses ibrowse instead of inets for its internal HTTP client +implementation. This means better replication stability.
  • +
+
+
+

13.9.1.3. Futon

+
    +
  • The view selector dropdown should now work in Opera and Internet Explorer +even when it includes optgroups for design documents. (COUCHDB-81)
  • +
+
+
+

13.9.1.4. JavaScript View Server

+
    +
  • Sealing of documents has been disabled due to an incompatibility with +SpiderMonkey 1.9.
  • +
  • Improve error handling for undefined values emitted by map functions. +(COUCHDB-83)
  • +
+
+
+

13.9.1.5. HTTP Interface

+
    +
  • Fix for chunked responses where chunks were always being split into multiple +TCP packets, which caused problems with the test suite under Safari, and in +some other cases.
  • +
  • Fix for an invalid JSON response body being returned for some kinds of +views. (COUCHDB-84)
  • +
  • Fix for connections not getting closed after rejecting a chunked request. +(COUCHDB-55)
  • +
  • CouchDB can now be bound to IPv6 addresses.
  • +
  • The HTTP Server header now contains the versions of CouchDB and Erlang.
  • +
+
+
+
+

13.9.2. Version 0.8.0-incubating

+
+

13.9.2.1. Build and System Integration

+
    +
  • CouchDB can automatically respawn following a server crash.
  • +
  • Database server no longer refuses to start with a stale PID file.
  • +
  • System logrotate configuration provided.
  • +
  • Improved handling of ICU shared libraries.
  • +
  • The couchdb script now automatically enables SMP support in Erlang.
  • +
  • The couchdb and couchjs scripts have been improved for portability.
  • +
  • The build and system integration have been improved for portability.
  • +
+
+
+

13.9.2.2. Database Core

+
    +
  • The view engine has been completely decoupled from the storage engine. Index +data is now stored in separate files, and the format of the main database +file has changed.
  • +
  • Databases can now be compacted to reclaim space used for deleted documents +and old document revisions.
  • +
  • Support for incremental map/reduce views has been added.
  • +
  • To support map/reduce, the structure of design documents has changed. View +values are now JSON objects containing at least a map member, and +optionally a reduce member.
  • +
  • View servers are now identified by name (for example javascript) instead of +by media type.
  • +
  • Automatically generated document IDs are now based on proper UUID generation +using the crypto module.
  • +
  • The field content-type in the JSON representation of attachments has been +renamed to content_type (underscore).
  • +
+
+
+

13.9.2.3. Futon

+
    +
  • When adding a field to a document, Futon now just adds a field with an +autogenerated name instead of prompting for the name with a dialog. The name +is automatically put into edit mode so that it can be changed immediately.
  • +
  • Fields are now sorted alphabetically by name when a document is displayed.
  • +
  • Futon can be used to create and update permanent views.
  • +
  • The maximum number of rows to display per page on the database page can now +be adjusted.
  • +
  • Futon now uses the XMLHTTPRequest API asynchronously to communicate with the +CouchDB HTTP server, so that most operations no longer block the browser.
  • +
  • View results sorting can now be switched between ascending and descending by +clicking on the Key column header.
  • +
  • Fixed a bug where documents that contained a @ character could not be +viewed. (COUCHDB-12)
  • +
  • The database page now provides a Compact button to trigger database +compaction. (COUCHDB-38)
  • +
  • Fixed potential double encoding of document IDs and other URI segments in +many instances. (COUCHDB-39)
  • +
  • Improved display of attachments.
  • +
  • The JavaScript Shell has been removed due to unresolved licensing issues.
  • +
+
+
+

13.9.2.4. JavaScript View Server

+
    +
  • SpiderMonkey is no longer included with CouchDB, but rather treated as a +normal external dependency. A simple C program (_couchjs) is provided that +links against an existing SpiderMonkey installation and uses the interpreter +embedding API.
  • +
  • View functions using the default JavaScript view server can now do logging +using the global log(message) function. Log messages are directed into the +CouchDB log at INFO level. (COUCHDB-59)
  • +
  • The global map(key, value) function made available to view code has been +renamed to emit(key, value).
  • +
  • Fixed handling of exceptions raised by view functions.
  • +
+
+
+

13.9.2.5. HTTP Interface

+
    +
  • CouchDB now uses MochiWeb instead of inets for the HTTP server +implementation. Among other things, this means that the extra configuration +files needed for inets (such as couch_httpd.conf) are no longer used.
  • +
  • The HTTP interface now completely supports the HEAD method. (COUCHDB-3)
  • +
  • Improved compliance of Etag handling with the HTTP specification. +(COUCHDB-13)
  • +
  • Etags are no longer included in responses to document GET requests that +include query string parameters causing the JSON response to change without +the revision or the URI having changed.
  • +
  • The bulk document update API has changed slightly on both the request and the +response side. In addition, bulk updates are now atomic.
  • +
  • CouchDB now uses TCP_NODELAY to fix performance problems with persistent +connections on some platforms due to nagling.
  • +
  • Including a ?descending=false query string parameter in requests to views +no longer raises an error.
  • +
  • Requests to unknown top-level reserved URLs (anything with a leading +underscore) now return an unknown_private_path error instead of the +confusing illegal_database_name.
  • +
  • The Temporary view handling now expects a JSON request body, where the JSON +is an object with at least a map member, and optional reduce and +language members.
  • +
  • Temporary views no longer determine the view server based on the Content-Type +header of the POST request, but rather by looking for a language member +in the JSON body of the request.
  • +
  • The status code of responses to DELETE requests is now 200 to reflect that +the deletion is performed synchronously.
  • +
+
+
+
+
+ + +
+
+
+
+
+ + + + + +

Table Of Contents

+ + +

Previous topic

+

12. Contributing to this Documentation

+ + +

Utilities

+ + + + +

More Help

+ + +
+
+
+
+ + + + \ No newline at end of file diff -Nru couchdb-1.2.0/share/doc/build/html/changes.html couchdb-1.4.0~rc.1/share/doc/build/html/changes.html --- couchdb-1.2.0/share/doc/build/html/changes.html 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/changes.html 2013-08-23 10:58:55.000000000 -0400 @@ -0,0 +1,414 @@ + + + + + + + + + + 8. Changes Feed — Apache CouchDB 1.4 Manual + + + + + + + + + + + + + + + + +
+
+
+
+ +
+

8. Changes Feed

+
+

8.1. Polling

+

A list of changes made to documents in the database, in the order they were +made, can be obtained from the database’s _changes resource. You can query +the _changes resource by issuing a GET request with the following +(optional) parameters:

+ ++++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ParameterValueDefault ValueNotes
sinceseqnum / now0(1)
limitmaxsequencesnone(2)
descendingbooleanfalse(3)
feednormal / longpoll / continuous / eventsourcenormal(4)
heartbeatmilliseconds60000(5)
timeoutmilliseconds60000(6)
filterdesigndoc/filtername / _viewnone(7)
include_docsbooleanfalse(8)
styleall_docs / main_onlymain_only(9)
viewdesigndoc/filternamenone(10)
+

Notes:

+
    +
  1. Start the results from the change immediately after the given sequence +number.

    +
  2. +
  3. Limit number of result rows to the specified value (note that using 0 here +has the same effect as 1).

    +
  4. +
  5. Return the change results in descending sequence order (most recent change +first)

    +
  6. +
  7. Select the type of feed.

    +
  8. +
  9. Period in milliseconds after which an empty line is sent in the results. +Only applicable for longpoll or continuous feeds. Overrides any timeout +to keep the feed alive indefinitely.

    +
  10. +
  11. Maximum period in milliseconds to wait for a change before the response is +sent, even if there are no results. Only applicable for longpoll or +continuous feeds. Note that 60000 is also the default maximum timeout to +prevent undetected dead connections.

    +

    You can change the default maximum timeout in your ini-configuration:

    +
    [httpd]
    +changes_timeout=#millisecs
    +
    +
    +
  12. +
  13. Reference to a filter function from a design document +that will filter whole stream emitting only filtered events. +See the section in the book for more information.

    +
  14. +
  15. Include the associated document with each result. If there are conflicts, +only the winning revision is returned.

    +
  16. +
  17. Specifies how many revisions are returned in the changes array. +The default, main_only, will only return the current “winning” revision; +all_docs will return all leaf revisions (including conflicts and deleted +former conflicts.)

    +
  18. +
  19. Allows the use of view functions as filters. It requires setting the filter +parameter to the special value _view to enable this feature. Documents are +counted as “passed” by the view filter if the map function emits at least one +record for them.

    +
  20. +
+

+Changed in version 0.11.0: added include_docs parameter

+

+Changed in version 1.2.0: added view parameter and special value _view +for filter one

+

+Changed in version 1.3.0: the since parameter can now take the value now to +start listening for changes since the current seq number.

+

+Changed in version 1.3.0: eventsource feed type added.

+

By default all changes are immediately returned as a JSON object:

+
GET /somedatabase/_changes HTTP/1.1
+
+
{"results":[
+{"seq":1,"id":"fresh","changes":[{"rev":"1-967a00dff5e02add41819138abb3284d"}]},
+{"seq":3,"id":"updated","changes":[{"rev":"2-7051cbe5c8faecd085a3fa619e6e6337"}]},
+{"seq":5,"id":"deleted","changes":[{"rev":"2-eec205a9d413992850a6e32678485900"}],"deleted":true}
+],
+"last_seq":5}
+
+
+

results is the list of changes in sequential order. New and changed +documents only differ in the value of the rev; deleted documents include the +"deleted": true attribute. (In the style=all_docs mode, deleted applies +only to the current/winning revision. The other revisions listed might be +deleted even if there is no deleted property; you have to GET them +individually to make sure.)

+

last_seq is the sequence number of the last update returned. (Currently it +will always be the same as the seq of the last item in results.)

+

Sending a since param in the query string skips all changes up to and +including the given sequence number:

+
GET /somedatabase/_changes?since=3 HTTP/1.1
+
+
{"results":[
+{"seq":5,"id":"deleted","changes":[{"rev":"2-eec205a9d413992850a6e32678485900"}],"deleted":true}
+],
+"last_seq":5}
+
+
+
+
+

8.2. Long Polling

+

The longpoll feed (probably most useful used from a browser) is a more +efficient form of polling that waits for a change to occur before the response +is sent. longpoll avoids the need to frequently poll CouchDB to discover +nothing has changed!

+

The response is basically the same JSON as is sent for the normal feed.

+

A timeout limits the maximum length of time the connection is open. If there +are no changes before the timeout expires the response’s results will be an +empty list.

+
+
+

8.3. Continuous

+

Polling the CouchDB server is not a good thing to do. Setting up new HTTP +connections just to tell the client that nothing happened puts unnecessary +strain on CouchDB.

+

A continuous feed stays open and connected to the database until explicitly +closed and changes are sent to the client as they happen, i.e. in near +real-time.

+

The continuous feed’s response is a little different than the other feed types +to simplify the job of the client - each line of the response is either empty +or a JSON object representing a single change, as found in the normal feed’s +results.

+
GET /somedatabase/_changes?feed=continuous HTTP/1.1
+
+
+
{"seq":1,"id":"fresh","changes":[{"rev":"1-967a00dff5e02add41819138abb3284d"}]}
+{"seq":3,"id":"updated","changes":[{"rev":"2-7051cbe5c8faecd085a3fa619e6e6337"}]}
+{"seq":5,"id":"deleted","changes":[{"rev":"2-eec205a9d413992850a6e32678485900"}],"deleted":true}
+... tum tee tum ...
+{"seq":6,"id":"updated","changes":[{"rev":"3-825cb35de44c433bfb2df415563a19de"}]}
+
+
+

Obviously, ... tum tee tum ... does not appear in the actual response, but +represents a long pause before the change with seq 6 occurred.

+
+
+

8.4. Event Source

+

The eventsource feed provides push notifications that can be consumed in +the form of DOM events in the browser. Refer to the W3C eventsource +specification for further details. CouchDB honors the Last-Event-ID header, +and if it’s present it will take precedence over the since query parameter.

+
GET /somedatabase/_changes?feed=eventsource HTTP/1.1
+
+
+
// define the event handling function
+if (window.EventSource) {
+
+  var source = new EventSource("/somedatabase/_changes?feed=eventsource");
+  source.onerror = function(e) {
+    alert('EventSource failed.');
+  };
+
+  var results = [];
+  var sourceListener = function(e) {
+    var data = JSON.parse(e.data);
+    results.push(data);
+  };
+
+  // start listening for events
+  source.addEventListener('message', sourceListener, false);
+
+  // stop listening for events
+  source.removeEventListener('message', sourceListener, false);
+
+}
+
+
+
+

Note

+

EventSource connections are subject to cross-origin resource sharing +restrictions. You might need to use the experimental CORS support to get the EventSource to work in your application.

+
+
+
+ + +
+
+
+
+
+ + + + + +

Table Of Contents

+ + +

Previous topic

+

7. Query Servers

+

Next topic

+

9. API Reference

+ + +

Utilities

+ + + + +

More Help

+ + +
+
+
+
+ + + + \ No newline at end of file diff -Nru couchdb-1.2.0/share/doc/build/html/config_reference.html couchdb-1.4.0~rc.1/share/doc/build/html/config_reference.html --- couchdb-1.2.0/share/doc/build/html/config_reference.html 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/config_reference.html 2013-08-23 10:58:56.000000000 -0400 @@ -0,0 +1,743 @@ + + + + + + + + + + 11. Configuration Reference — Apache CouchDB 1.4 Manual + + + + + + + + + + + + + + + + +
+
+
+
+ +
+

11. Configuration Reference

+
+

11.1. Configuration Groups

+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
SectionDescription
attachmentsAttachment options
couchdbCouchDB specific options
couch_httpd_authHTTPD Authentication options
daemonsDaemons and background processes
httpdHTTPD Server options
httpd_db_handlersDatabase Operation handlers
httpd_design_handlersHandlers for design document operations
httpd_global_handlersHandlers for global operations
logLogging options
query_serversQuery Server options
query_server_configQuery server options
replicatorReplicator Options
sslSSL (Secure Sockets Layer) Options
statsStatistics options
uuidsUUID generation options
corsCross Origin Resource Sharing settings
+
+
+

11.2. attachments Configuration Options

+ ++++ + + + + + + + + + + + + + +
OptionDescription
compressible_typescompressible_types
compression_levelcompression_level
+
+
+

11.3. couchdb Configuration Options

+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
OptionDescription
database_dirdatabase_dir
delayed_commitsdelayed_commits
max_attachment_chunk_sizemax_attachment_chunk_size
max_dbs_openmax_dbs_open
max_document_sizemax_document_size
os_process_timeoutos_process_timeout
uri_fileuri_file
util_driver_dirutil_driver_dir
view_index_dirview_index_dir
+
+
+

11.4. daemons Configuration Options

+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
OptionDescription
auth_cacheauth_cache
db_update_notifierdb_update_notifier
external_managerexternal_manager
httpdhttpd
httpsdEnabled HTTPS service
query_serversquery_servers
stats_aggregatorstats_aggregator
stats_collectorstats_collector
uuidsuuids
view_managerview_manager
+
+
+

11.5. httpd_db_handlers Configuration Options

+ ++++ + + + + + + + + + + + + + + + + + + + + + + +
OptionDescription
_changes_changes
_compact_compact
_design_design
_temp_view_temp_view
_view_cleanup_view_cleanup
+
+
+

11.6. couch_httpd_auth Configuration Options

+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
OptionDescription
auth_cache_sizeauth_cache_size
authentication_dbauthentication_db
authentication_redirectauthentication_redirect
require_valid_userrequire_valid_user
timeouttimeout
iterationsPassword key derivation iterations
users_db_publicAllow all users to view user documents
public_fieldsWorld-viewable user document fields
+
+

Note

+

Using the public_fields whitelist for user document properties requires +setting the users_db_public option to true (the latter option has no +other purpose).

+
+
+
+

11.7. httpd Configuration Options

+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
OptionDescription
allow_jsonpallow_jsonp
authentication_handlersauthentication_handlers
bind_addressbind_address
default_handlerdefault_handler
max_connectionsmax_connections
nodelayEnable TCP_NODELAY
portport
secure_rewritessecure_rewrites
vhost_global_handlersvhost_global_handlers
enable_corsenables CORS functionality when true
+
+
+

11.8. httpd_design_handlers Configuration Options

+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
OptionDescription
_info_info
_list_list
_rewrite_rewrite
_show_show
_update_update
_view_view
+
+
+

11.9. httpd_global_handlers Configuration Options

+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
OptionDescription
//
_active_tasks_active_tasks
_all_dbs_all_dbs
_config_config
_log_log
_oauth_oauth
_replicate_replicate
_restart_restart
_session_session
_stats_stats
_utils_utils
_uuids_uuids
favicon.icofavicon.ico
+
+
+

11.10. log Configuration Options

+ ++++ + + + + + + + + + + + + + + + + +
OptionDescription
filefile
include_saslinclude_sasl
levellevel
+
+
+

11.11. query_servers Configuration Options

+ ++++ + + + + + + + + + + +
OptionDescription
javascriptjavascript
+
+
+

11.12. query_server_config Configuration Options

+ ++++ + + + + + + + + + + +
OptionDescription
reduce_limitreduce_limit
+
+
+

11.13. replicator Configuration Options

+ ++++ + + + + + + + + + + + + + +
OptionDescription
max_http_pipeline_sizemax_http_pipeline_size
max_http_sessionsmax_http_sessions
+
+
+

11.14. stats Configuration Options

+ ++++ + + + + + + + + + + + + + +
OptionDescription
raterate
samplessamples
+
+
+

11.15. uuids Configuration Options

+ ++++ + + + + + + + + + + +
OptionDescription
algorithmalgorithm
+
+
+

11.16. cors Configuration Options

+ ++++ + + + + + + + + + + + + + + + + +
OptionDescription
originsList of origins, separated by a comma +(protocol, host, optional port)
methodsaccepted HTTP methods
credentialstrue sends additional header +Access-Control-Allow-Credentials=true
+

Note that credentials=true and origins=* are mutually exclusive.

+
+
+

11.17. cors vhost Configuration

+

The same configuration options for cors overall may be applied to an +individual vhost, within a specific section header, for example.com the +appropriate section would be: [cors:http://example.com]

+
+
+ + +
+
+
+ +
+
+ + + + \ No newline at end of file diff -Nru couchdb-1.2.0/share/doc/build/html/configuring.html couchdb-1.4.0~rc.1/share/doc/build/html/configuring.html --- couchdb-1.2.0/share/doc/build/html/configuring.html 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/configuring.html 2013-08-23 10:58:57.000000000 -0400 @@ -0,0 +1,682 @@ + + + + + + + + + + 3. Configuration — Apache CouchDB 1.4 Manual + + + + + + + + + + + + + + + + +
+
+
+
+ +
+

3. Configuration

+
+

3.1. CouchDB Configuration Files

+
+
+

3.2. Configuration File Locations

+

CouchDB reads files from the following locations, in the following +order.

+
    +
  1. PREFIX/default.ini
  2. +
  3. PREFIX/default.d/*
  4. +
  5. PREFIX/local.ini
  6. +
  7. PREFIX/local.d/*
  8. +
+

Settings in successive documents override the settings in earlier +entries. For example, setting the bind_address parameter in +local.ini would override any setting in default.ini.

+
+

Warning

+

The default.ini file may be overwritten during an upgrade or +re-installation, so localised changes should be made to the +local.ini file or files within the local.d directory.

+
+
+
+

3.3. Update Notifications

+
+
+

3.4. MochiWeb Server Options

+

Server options for the MochiWeb component of CouchDB can be added to the +configuration files. Settings should be added to the server_options +option of the [httpd] section of local.ini. For example:

+
[httpd]
+server_options = [{backlog, 128}, {acceptor_pool_size, 16}]
+
+
+
+
+

3.5. Socket Options Configuration Setting

+

The socket options for the listening socket in CouchDB can now be set +within the CouchDB configuration file. The setting should be added to +the [httpd] section of the file using the option name +socket_options. The specification is as a list of tuples. For +example:

+
[httpd]
+socket_options = [{recbuf, 262144}, {sndbuf, 262144}, {nodelay, true}]
+
+
+

The options supported are a subset of full options supported by the +TCP/IP stack. A list of the supported options are provided in the +Erlang inet documentation.

+
+
+

3.6. Virtual Hosts

+

CouchDB, since 0.11.0, can map requests to different locations based on +the Host header, even if they arrive on the same inbound IP address.

+

This allows different virtual hosts on the same machine to map to different +databases or design documents, etc. The most common use case is to map a +virtual host to a Rewrite Handler, to provide full control over the +application’s URIs.

+

To add a virtual host, add a CNAME pointer to the DNS for your domain +name. For development and testing, it is sufficient to add an entry in +the hosts file, typically /etc/hosts on Unix-like operating systems:

+
# CouchDB vhost definitions, refer to local.ini for further details
+127.0.0.1       sofa.couchdb
+
+
+

Test that this is working:

+
$ ping sofa.couchdb
+PING sofa.couchdb (127.0.0.1) 56(84) bytes of data.
+64 bytes from localhost.localdomain (127.0.0.1): icmp_req=1 ttl=64 time=0.025 ms
+64 bytes from localhost.localdomain (127.0.0.1): icmp_req=2 ttl=64 time=0.051 ms
+^C
+
+
+

Finally, add an entry to your configuration file in the [vhosts] +section:

+
[vhosts]
+sofa.couchdb:5984 = /sofa/_design/sofa/_rewrite
+
+
+

If your CouchDB is listening on the default HTTP port, or is sitting +behind a proxy, then don’t specify a port number in the vhost key.

+

With the above setup, a request to http://sofa.couchdb:5984/sweet-o +will be mapped to +http://127.0.0.1:5984/sofa/_design/sofa/_rewrite/sweet-o

+

+New in version 0.11.0: added vhosts functionality

+
+
+

3.7. HTTP Rewrite Handler

+

Following on from virtual hosts, CouchDB includes a custom URL rewriter. +All rewriting is done from /dbname/_design/ddocname/_rewrite by default.

+

The rewriter is flexible, and can handle methods and custom query formats.

+

Each rule should be in the rewrites top-level key of the design doc. +Example of a complete rule :

+
{
+    ....
+    "rewrites": [
+    {
+        "from": "",
+        "to": "index.html",
+        "method": "GET",
+        "query": {}
+    }
+    ]
+}
+
+

from: is the path rule used to bind current uri to the rule. It +uses pattern matching for that.

+

to: rule to rewrite an url. It can contain variables depending on +binding variables discovered during pattern matching and query args +(url args and from the query member.)

+

method: method to bind the request method to the rule. If method +is missing, any method will be matched in the rewrite.

+

query: optional query arguments that may contain dynamic variables, +bound to keys to be used with the matching URL.

+

to and from are paths with patterns. The pattern can be strings starting +with : or *, for example /somepath/:var/*.

+

The pattern matching is done by first matching the request method to a +rule. Then it will try to match the path to one specific rule. If no rule +matches, then a 404 error is displayed.

+

The path is converted into an erlang list, by regex splitting on /. Each +variable is converted into an atom. The subsequent pattern matching step is +done by splitting / in the request url into a list of atoms. A string +pattern will match the equivalent token. The * atom will match any number +of tokens, but may only be present as the last pattern in the path. If all +tokens are matched, and all path terms have been consumed, then the overall +path specification matches.

+

Once a matching from rule is found we rewrite the request url using the +from, to, and query members. Each identified token will be reused +within the rule, and in the subsequent query if required. The identified +tokens are matched to the rule and will replace var. If * is found in +the rule it will contain any remaining suffix.

+

The rewriter is re-entrant, and has a configurable recursion limit, set +by default at 100.

+
+
+

3.8. Configuring Server Administrators

+

A default CouchDB install provides admin-level access to all connecting users. +This configuration is known as Admin Party, and is not recommended for +in-production usage. You can crash the party simply by creating the first +admin account. CouchDB server administrators and passwords are not stored +in the _users database, but in the local.ini file, which should be +appropriately secured and readable only by system administrators.

+
[admins]
+;admin = mysecretpassword
+admin = -hashed-6d3c30241ba0aaa4e16c6ea99224f915687ed8cd,7f4a3e05e0cbc6f48a0035e3508eef90
+architect = -pbkdf2-43ecbd256a70a3a2f7de40d2374b6c3002918834,921a12f74df0c1052b3e562a23cd227f,10000
+
+
+

Administrators can be added directly to the [admins] section, and when +CouchDB is restarted, the passwords will be salted and encrypted. You may +also use the HTTP interface to create administrator accounts; this way, +you don’t need to restart CouchDB, and there’s no need to temporarily store +or transmit passwords in plaintext. The HTTP _config/admins endpoint +supports querying, deleting or creating new admin accounts:

+
shell> GET /_config/admins HTTP/1.1
+    Accept: application/json
+    Host: localhost:5984
+
+HTTP/1.1 200 OK
+    Cache-Control: must-revalidate
+    Content-Length: 196
+    Content-Type: application/json
+    Date: Fri, 30 Nov 2012 11:37:18 GMT
+    Server: CouchDB/1.3.0 (Erlang OTP/R15B02)
+
+
+
{
+    "admin": "-hashed-6d3c30241ba0aaa4e16c6ea99224f915687ed8cd,7f4a3e05e0cbc6f48a0035e3508eef90",
+    "architect": "-pbkdf2-43ecbd256a70a3a2f7de40d2374b6c3002918834,921a12f74df0c1052b3e562a23cd227f,10000"
+}
+
+
+

If you already have a salted, encrypted password string (for example, +from an old local.ini file, or from a different CouchDB server), then +you can store the “raw” encrypted string, without having CouchDB doubly +encrypt it.

+
shell> PUT /_config/admins/architect?raw=true HTTP/1.1
+    Accept: application/json
+    Content-Type: application/json
+    Content-Length: 89
+    Host: localhost:5984
+
+    "-pbkdf2-43ecbd256a70a3a2f7de40d2374b6c3002918834,921a12f74df0c1052b3e562a23cd227f,10000"
+
+HTTP/1.1 200 OK
+    Cache-Control: must-revalidate
+    Content-Length: 89
+    Content-Type: application/json
+    Date: Fri, 30 Nov 2012 11:39:18 GMT
+    Server: CouchDB/1.3.0 (Erlang OTP/R15B02)
+
+
+
"-pbkdf2-43ecbd256a70a3a2f7de40d2374b6c3002918834,921a12f74df0c1052b3e562a23cd227f,10000"
+
+
+

Further details are available in security_, including configuring the +work factor for PBKDF2, and the algorithm itself at +PBKDF2 (RFC-2898).

+

+New in version 1.3.0: PBKDF2 server-side hashed salted password support added, +now as a synchronous call for the _config/admins API.

+
+
+

3.9. OS Daemons

+

CouchDB now supports starting external processes. The support is simple +and enables CouchDB to start each configured OS daemon. If the daemon +stops at any point, CouchDB will restart it (with protection to ensure +regularly failing daemons are not repeatedly restarted).

+

The daemon starting process is one-to-one; for each configured +daemon in the configuration file, CouchDB will start exactly one +instance. If you need to run multiple instances, then you must create +separate individual configurations. Daemons are configured within the +[os_daemons] section of your configuration file (local.ini). The +format of each configured daemon is:

+
NAME = PATH ARGS
+
+
+

Where NAME is an arbitrary (and unique) name to identify the daemon; +PATH is the full path to the daemon to be executed; ARGS are any +required arguments to the daemon.

+

For example:

+
[os_daemons]
+basic_responder = /usr/local/bin/responder.js
+
+
+

There is no interactivity between CouchDB and the running process, but +you can use the OS Daemons service to create new HTTP servers and +responders and then use the new proxy service to redirect requests and +output to the CouchDB managed service. For more information on proxying, +see HTTP Proxying. For further background on the OS Daemon service, see +CouchDB Externals API.

+
+
+

3.10. Native SSL Support

+

CouchDB 1.4 supports SSL natively. All your secure connection needs can +now be served without needing to setup and maintain a separate proxy server +that handles SSL.

+

SSL setup can be tricky, but the configuration in CouchDB was designed +to be as easy as possible. All you need is two files; a certificate and +a private key. If you bought an official SSL certificate from a +certificate authority, both should be in your possession already.

+

If you just want to try this out and don’t want to pay anything upfront, +you can create a self-signed certificate. Everything will work the same, +but clients will get a warning about an insecure certificate.

+

You will need the OpenSSL command line tool installed. It probably +already is.

+
shell> mkdir cert && cd cert
+shell> openssl genrsa > privkey.pem
+shell> openssl req -new -x509 -key privkey.pem -out mycert.pem -days 1095
+shell> ls
+mycert.pem privkey.pem
+
+

Now, you need to edit CouchDB’s configuration, either by editing your +local.ini file or using the /_config API calls or the +configuration screen in Futon. Here is what you need to do in +local.ini, you can infer what needs doing in the other places.

+

Be sure to make these edits. Under [daemons] you should see:

+
; enable SSL support by uncommenting the following line and supply the PEM's below.
+; the default ssl port CouchDB listens on is 6984
+;httpsd = {couch_httpd, start_link, [https]}
+
+

Here uncomment the last line:

+
httpsd = {couch_httpd, start_link, [https]}
+
+

Next, under [ssl] you will see:

+
;cert_file = /full/path/to/server_cert.pem
+;key_file = /full/path/to/server_key.pem
+
+

Uncomment and adjust the paths so it matches your system’s paths:

+
cert_file = /home/jan/cert/mycert.pem
+key_file = /home/jan/cert/privkey.pem
+
+

For more information please read +http://www.openssl.org/docs/HOWTO/certificates.txt.

+

Now start (or restart) CouchDB. You should be able to connect to it +using HTTPS on port 6984:

+
shell> curl https://127.0.0.1:6984/
+curl: (60) SSL certificate problem, verify that the CA cert is OK. Details:
+error:14090086:SSL routines:SSL3_GET_SERVER_CERTIFICATE:certificate verify failed
+More details here: http://curl.haxx.se/docs/sslcerts.html
+
+curl performs SSL certificate verification by default, using a "bundle"
+of Certificate Authority (CA) public keys (CA certs). If the default
+bundle file isn't adequate, you can specify an alternate file
+using the --cacert option.
+If this HTTPS server uses a certificate signed by a CA represented in
+the bundle, the certificate verification probably failed due to a
+problem with the certificate (it might be expired, or the name might
+not match the domain name in the URL).
+If you'd like to turn off curl's verification of the certificate, use
+the -k (or --insecure) option.
+
+

Oh no what happened?! — Remember, clients will notify their users that +your certificate is self signed. curl is the client in this case and +it notifies you. Luckily you trust yourself (don’t you?) and you can +specify the -k option as the message reads:

+
shell> curl -k https://127.0.0.1:6984/
+{"couchdb":"Welcome","version":"|version|"}
+
+

All done.

+
+
+

3.11. HTTP Proxying

+

The HTTP proxy feature makes it easy to map and redirect different +content through your CouchDB URL. The proxy works by mapping a pathname +and passing all content after that prefix through to the configured +proxy address.

+

Configuration of the proxy redirect is handled through the +[httpd_global_handlers] section of the CouchDB configuration file +(typically local.ini). The format is:

+
[httpd_global_handlers]
+PREFIX = {couch_httpd_proxy, handle_proxy_req, <<"DESTINATION">>}
+
+
+

Where:

+
    +
  • PREFIX

    +

    Is the string that will be matched. The string can be any valid +qualifier, although to ensure that existing database names are not +overridden by a proxy configuration, you can use an underscore +prefix.

    +
  • +
  • DESTINATION

    +

    The fully-qualified URL to which the request should be sent. The +destination must include the http prefix. The content is used +verbatim in the original request, so you can also forward to servers +on different ports and to specific paths on the target host.

    +
  • +
+

The proxy process then translates requests of the form:

+
http://couchdb:5984/PREFIX/path
+
+
+

To:

+
DESTINATION/path
+
+
+
+

Note

+

Everything after PREFIX including the required forward slash +will be appended to the DESTINATION.

+
+

The response is then communicated back to the original client.

+

For example, the following configuration:

+
_google = {couch_httpd_proxy, handle_proxy_req, <<"http://www.google.com">>}
+
+
+

Would forward all requests for http://couchdb:5984/_google to the +Google website.

+

The service can also be used to forward to related CouchDB services, +such as Lucene:

+
[httpd_global_handlers]
+_fti = {couch_httpd_proxy, handle_proxy_req, <<"http://127.0.0.1:5985">>}
+
+
+
+

Note

+

The proxy service is basic. If the request is not identified by the +DESTINATION, or the remainder of the PATH specification is +incomplete, the original request URL is interpreted as if the +PREFIX component of that URL does not exist.

+

For example, requesting http://couchdb:5984/_intranet/media when +/media on the proxy destination does not exist, will cause the +request URL to be interpreted as http://couchdb:5984/media. Care +should be taken to ensure that both requested URLs and destination +URLs are able to cope.

+
+
+
+

3.12. Cross-Origin Resource Sharing

+

CORS, or “Cross-Origin Resource Sharing”, allows a resource such as a web +page running JavaScript inside a browser, to make AJAX requests +(XMLHttpRequests) to a different domain, without compromising the security +of either party.

+

A typical use case is to have a static website hosted on a CDN make +requests to another resource, such as a hosted CouchDB instance. This +avoids needing an intermediary proxy, using JSONP or similar workarounds +to retrieve and host content.

+

While CouchDB’s integrated HTTP server and support for document attachments +makes this less of a constraint for pure CouchDB projects, there are many +cases where separating the static content from the database access is +desirable, and CORS makes this very straightforward.

+

By supporting CORS functionality, a CouchDB instance can accept direct +connections to protected databases and instances, without the browser +functionality being blocked due to same-origin constraints. CORS is +supported today on over 90% of recent browsers.

+

CORS support is provided as experimental functionality in 1.3.0, and as such +will need to be enabled specifically in CouchDB’s configuration. While all +origins are forbidden from making requests by default, support is available +for simple requests, preflight requests and per-vhost configuration.

+

+New in version 1.3.0.

+
+

3.12.1. Enabling CORS

+

To enable CORS support, you need to set the enable_cors = true option +in the [httpd] section of local.ini, and add a [cors] section +containing an origins = * setting. Note that by default, no origins are +accepted; you must either use a wildcard or whitelist.

+
[httpd]
+enable_cors = true
+
+[cors]
+origins = *
+
+
+
+
+

3.12.2. Passing Credentials

+

By default, neither authentication headers nor cookies are included in +requests and responses. To do so requires both setting +XmlHttpRequest.withCredentials = true on the request object in the +browser and enabling credentials support in CouchDB.

+
[cors]
+credentials = true
+
+
+

CouchDB will respond to a credentials-enabled CORS request with an additional +header, Access-Control-Allow-Credentials=true.

+
+
+

3.12.3. Tightening Access

+

Access can be restricted by protocol, host and optionally by port:

+
[cors]
+; List of origins, separated by a comma (protocol, host, optional port)
+; refer to http://tools.ietf.org/html/rfc6454 for specification
+origins = http://localhost, https://localhost, http://www.number10.gov.uk:80
+
+
+

Specific HTTP methods may also be restricted:

+
[cors]
+; List of accepted methods, comma-separated
+; refer to http://tools.ietf.org/html/rfc2616, rfc2817, rfc5789
+methods = GET, POST, PUT, DELETE
+
+
+

You can allow additional HTTP header fields:

+
[cors]
+; List of accepted headers separated by a comma
+headers = Authorization, Cookie
+
+
+

Note that Authorization and Cookie are not part of the standard set.

+
+
+

3.12.4. Configuration per vhost

+

All CORS-related settings may be configured on a per-vhost basis. For example, +the configuration section for http://example.com/ would be contained in:

+
[cors:http://example.com]
+credentials = false
+origins = *
+methods = GET, PUT, HEAD
+
+
+
+
+

3.12.5. Useful References

+ +

Standards and References:

+ +

Mozilla Developer Network Resources:

+ +

Client-side CORS support and usage:

+ +
+
+
+ + +
+
+
+ +
+
+ + + + \ No newline at end of file diff -Nru couchdb-1.2.0/share/doc/build/html/ddocs.html couchdb-1.4.0~rc.1/share/doc/build/html/ddocs.html --- couchdb-1.2.0/share/doc/build/html/ddocs.html 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/ddocs.html 2013-08-23 10:58:59.000000000 -0400 @@ -0,0 +1,976 @@ + + + + + + + + + + 6. Design Docs — Apache CouchDB 1.4 Manual + + + + + + + + + + + + + + + + +
+
+
+
+ +
+

6. Design Docs

+

In this section we’ll show how to write design documents, using the built-in +JavaScript Query Server.

+

But before we start to write our first function, let’s take a look at the list +of common objects that will be used during our code journey - we’ll be using +them extensively within each function:

+ +
+

6.1. View functions

+

Views are the primary tool used for querying and reporting on CouchDB databases.

+
+

6.1.1. Map functions

+
+
+mapfun(doc)
+
+++ + + + +
Arguments:
    +
  • doc – Processed document object.
  • +
+
+
+ +

Map functions accept a single document as the argument and (optionally) emit() +key/value pairs that are stored in a view.

+
function (doc) {
+  if (doc.type === 'post' && doc.tags && Array.isArray(doc.tags)) {
+    doc.tags.forEach(function (tag) {
+      emit(tag.toLowerCase(), 1);
+    });
+  }
+}
+
+
+

In this example a key/value pair is emitted for each value in the tags array +of a document with a type of “post”. Note that emit() may be called many +times for a single document, so the same document may be available by several +different keys.

+

Also keep in mind that each document is sealed to prevent a situation where one +map function changes the document state and another one receives a modified +version.

+

For efficiency reasons, documents are passed to a group of map functions - +each document is processed by a group of map functions from all views of the +related design document. This means that if you trigger an index update for one +view in the ddoc, all others will get updated too.

+

Since the 1.1.0 release, map functions support +CommonJS modules and access to the require() function.

+
+
+

6.1.2. Reduce and rereduce functions

+
+
+redfun(keys, values[, rereduce])
+
+++ + + + + + +
Arguments:
    +
  • keys – Array of pairs docid-key for related map function result. +Always null if rereduce is running (has true value).
  • +
  • values – Array of map function result values.
  • +
  • rereduce – Boolean sign of rereduce run.
  • +
+
Returns:

Reduces values

+
+
+ +

Reduce functions take two required arguments, the keys and values lists - the +result of the related map function - and an optional third one which indicates if +rereduce mode is active or not. Rereduce is used for additional reduction of the +values list, so when it is true there is no information about related keys +(the first argument is null).

+

Note that if the result produced by a reduce function is longer than the initial +values list then a Query Server error will be raised. However, this behavior +can be disabled by setting the reduce_limit config option to false:

+
[query_server_config]
+reduce_limit = false
+
+
+

While disabling reduce_limit might be useful for debugging purposes, remember +that the main task of reduce functions is to reduce the mapped result, not to make it +even bigger. Generally, your reduce function should converge rapidly to a single +value - which could be an array or similar object.

+

Also CouchDB has three built-in reduce functions. These are implemented in +Erlang and run right inside CouchDB, so they are much faster than the equivalent +JavaScript functions: _sum, _count and _stats. Their equivalents in +JavaScript below:

+
// could be replaced by _sum
+function(keys, values){
+  return sum(values);
+}
+
+// could be replaced by _count
+function(keys, values, rereduce){
+  if (rereduce) {
+    return sum(values);
+  } else {
+    return values.length;
+  }
+}
+
+// could be replaced by _stats
+function(keys, values, rereduce){
+  return {
+    'sum': sum(values),
+    'min': Math.min.apply(null, values),
+    'max': Math.max.apply(null, values),
+    'count': values.length,
+    'sumsqr': (function(){
+      var sumsqr = 0;
+
+      values.forEach(function (value) {
+        sumsqr += value * value;
+      });
+
+      return sumsqr;
+    })(),
+  }
+}
+
+
+
+

Note

+

Why don’t reduce functions support CommonJS modules?

+

While map functions have limited access to stored modules through +require() function there is no such feature for reduce functions. +The reason lies deep inside in mechanism how map and reduce functions +are processed by Query Server. Let’s take a look on map functions first:

+
    +
  1. CouchDB sends all map functions for processed design document to +Query Server.
  2. +
  3. Query Server handles them one by one, compiles and puts them onto an +internal stack.
  4. +
  5. After all map functions had been processed, CouchDB will send the +remaining documents to index one by one.
  6. +
  7. The Query Server receives the document object and applies it to every function +from the stack. The emitted results are then joined into a single array and sent +back to CouchDB.
  8. +
+

Now let’s see how reduce functions are handled:

+
    +
  1. CouchDB sends, as a single command, the list of available reduce functions +with the result list of key-value pairs that was previously received as the +result of the map functions' work.
  2. +
  3. Query Server compiles the reduce functions and applies them to the key-value +lists. The reduced result is sent back to CouchDB.
  4. +
+

As you may note, reduce functions are applied in a single shot while +map ones are applied in an iterative way per each document. This means that +it's possible for map functions to precompile CommonJS libraries and use them +during the entire view processing, but reduce functions will be +compiled again and again for each view result reduction, which will lead to +performance degradation (reduce functions already do hard work to make a +large result smaller).

+
+
+
+
+

6.2. Show functions

+
+
+showfun(doc, req)
+
+++ + + + + + + + +
Arguments:
    +
  • doc – Processed document, may be omitted.
  • +
  • reqRequest object.
  • +
+
Returns:

Response object

+
Return type:

object or string

+
+
+ +

Show functions are used to represent documents in various formats, commonly as an +HTML page with nicer formatting. They can also be used to run server-side functions +without requiring a pre-existing document.

+

Basic example of show function could be:

+
function(doc, req){
+  if (doc) {
+    return "Hello from " + doc._id + "!";
+  } else {
+    return "Hello, world!";
+  }
+}
+
+
+

Also, there is a simpler way to return JSON-encoded data:

+
function(doc, req){
+  return {
+    'json': {
+      'id': doc['_id'],
+      'rev': doc['_rev']
+    }
+  }
+}
+
+
+

and even files (this one is CouchDB logo):

+
function(doc, req){
+  return {
+    'headers': {
+      'Content-Type' : 'image/png',
+    },
+    'base64': ''.concat(
+      'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAMAAAAoLQ9TAAAAsV',
+      'BMVEUAAAD////////////////////////5ur3rEBn////////////////wDBL/',
+      'AADuBAe9EB3IEBz/7+//X1/qBQn2AgP/f3/ilpzsDxfpChDtDhXeCA76AQH/v7',
+      '/84eLyWV/uc3bJPEf/Dw/uw8bRWmP1h4zxSlD6YGHuQ0f6g4XyQkXvCA36MDH6',
+      'wMH/z8/yAwX64ODeh47BHiv/Ly/20dLQLTj98PDXWmP/Pz//39/wGyJ7Iy9JAA',
+      'AADHRSTlMAbw8vf08/bz+Pv19jK/W3AAAAg0lEQVR4Xp3LRQ4DQRBD0QqTm4Y5',
+      'zMxw/4OleiJlHeUtv2X6RbNO1Uqj9g0RMCuQO0vBIg4vMFeOpCWIWmDOw82fZx',
+      'vaND1c8OG4vrdOqD8YwgpDYDxRgkSm5rwu0nQVBJuMg++pLXZyr5jnc1BaH4GT',
+      'LvEliY253nA3pVhQqdPt0f/erJkMGMB8xucAAAAASUVORK5CYII=')
+  }
+}
+
+
+

But what if you need to represent data in different formats via a single function? +The registerType() and provides() functions are your best friends in +that case:

+
function(doc, req){
+  provides('json', function(){
+    return {'json': doc}
+  });
+  provides('html', function(){
+    return '<pre>' + toJSON(doc) + '</pre>'
+  })
+  provides('xml', function(){
+    return {
+      'headers': {'Content-Type': 'application/xml'},
+      'body' : ''.concat(
+        '<?xml version="1.0" encoding="utf-8"?>\n',
+        '<doc>',
+        (function(){
+          escape = function(s){
+            return s.replace(/&quot;/g, '"')
+                    .replace(/&gt;/g, '>')
+                    .replace(/&lt;/g, '<')
+                    .replace(/&amp;/g, '&');
+          };
+          var content = '';
+          for(var key in doc){
+            if(!doc.hasOwnProperty(key)) continue;
+            var value = escape(toJSON(doc[key]));
+            var key = escape(key);
+            content += ''.concat(
+              '<' + key + '>',
+              value,
+              '</' + key + '>'
+            )
+          }
+          return content;
+        })(),
+        '</doc>'
+      )
+    }
+  })
+  registerType('text-json', 'text/json')
+  provides('text-json', function(){
+    return toJSON(doc);
+  })
+}
+
+
+

This function may return html, json, xml or our custom text json format +representation of the same document object with the same processing rules. Probably, +the xml provider in our function needs more care to handle nested objects +correctly, and keys with invalid characters, but you've got the idea!

+
+

See also

+
+
CouchDB Wiki:
+
+
+
CouchDB Guide:
+
+
+
+
+
+
+

6.3. List functions

+
+
+listfun(head, req)
+
+++ + + + + + + + +
Arguments: +
Returns:

Last chunk.

+
Return type:

string

+
+
+ +

While Show functions are used to customize document presentation, List functions +are used for same purpose, but against View functions results.

+

The next list function formats view and represents it as a very simple HTML page:

+
function(head, req){
+  start({
+    'headers': {
+      'Content-Type': 'text/html'
+    }
+  });
+  send('<html><body><table>');
+  send('<tr><th>ID</th><th>Key</th><th>Value</th></tr>')
+  while(row = getRow()){
+    send(''.concat(
+      '<tr>',
+      '<td>' + toJSON(row.id) + '</td>',
+      '<td>' + toJSON(row.key) + '</td>',
+      '<td>' + toJSON(row.value) + '</td>',
+      '</tr>'
+    ));
+  }
+  send('</table></body></html>');
+}
+
+
+

Templates and styles could obviously be used to present data in a nicer +fashion, but this is an excellent starting point. Note that you may also +use registerType() and provides() functions in the same +way as for Show functions!

+
+

See also

+
+
CouchDB Wiki:
+
+
+
CouchDB Guide:
+
+
+
+
+
+
+

6.4. Update functions

+
+
+updatefun(doc, req)
+
+++ + + + + + +
Arguments: +
Returns:

Two-element array: the first element is the (updated or new) +document, which is committed to the database. If the first element +is null no document will be committed to the database. +If you are updating an existing document, it should already have an _id +set, and if you are creating a new document, make sure to set its +_id to something, either generated based on the input or the +req.uuid provided. The second element is the response that will +be sent back to the caller.

+
+
+ +

Update handlers are functions that clients can request to invoke server-side +logic that will create or update a document. This feature allows a range of use +cases such as providing a server-side last modified timestamp, updating +individual fields in a document without first getting the latest revision, etc.

+

When the request to an update handler includes a document ID in the URL, the +server will provide the function with the most recent version of that document. +You can provide any other values needed by the update handler function via the +POST/PUT entity body or query string parameters of the request.

+

The basic example that demonstrates all use-cases of update handlers below:

+
function(doc, req){
+    if (!doc){
+        if ('id' in req){
+            // create new document
+            return [{'_id': req['id']}, 'New World']
+        }
+        // change nothing in database
+        return [null, 'Empty World']
+    }
+    doc['world'] = 'hello';
+    doc['edited_by'] = req['userCtx']['name']
+    return [doc, 'Edited World!']
+}
+
+
+
+

See also

+
+
CouchDB Wiki:
+
+
+
+
+
+
+

6.5. Filter functions

+
+
+filterfun(doc, req)
+
+++ + + + + + +
Arguments: +
Returns:

Boolean value: true means that doc passes the filter rules, +false that not.

+
+
+ +

Filter functions mostly act like Show functions and List functions: they +format - or, more correctly, filter - the changes feed.

+
+

6.5.1. Classic filters

+

By default the changes feed emits all database documents changes. But if you’re +waiting for some special changes, processing all documents is inefficient.

+

Filters are special design document functions that allow the changes feed to emit +only specific documents that pass the filter rules.

+

Let's assume that our database is a mailbox and we need to handle only new mail +(documents with status new) events. Assuming that, our filter function +will look like the following one:

+
function(doc, req){
+  // we need only `mail` documents
+  if (doc.type != 'mail'){
+    return false;
+  }
+  // we're interested only in `new` ones
+  if (doc.status != 'new'){
+    return false;
+  }
+  return true; // passed!
+}
+
+
+

Filter functions must return true if the document passed all the defined +rules. Now, if you apply this function to the changes feed it will emit only changes +about “new mails”:

+
GET /somedatabase/_changes?filter=mailbox/new_mail HTTP/1.1
+
+
{"results":[
+{"seq":1,"id":"df8eca9da37dade42ee4d7aa3401f1dd","changes":[{"rev":"1-c2e0085a21d34fa1cecb6dc26a4ae657"}]},
+{"seq":7,"id":"df8eca9da37dade42ee4d7aa34024714","changes":[{"rev":"1-29d748a6e87b43db967fe338bcb08d74"}]},
+],
+"last_seq":27}
+
+
+

Note that the last_seq number is 27, but we received only two records. +It seems the other changes were about documents that haven't passed our filter.

+

Probably, we also need to filter the changes feed of our mailbox not only by a single +status value: we're also interested in statuses like “spam” to update +spam-filter heuristic rules, “outgoing” to let the mail daemon actually send mails, +and so on. Creating a lot of similar functions that actually do similar work +isn't a good idea - so we need a dynamic filter.

+

As you may have noticed, filter functions take a +request object as their second argument - this allows the creation of dynamic filters +based on query parameters, user context and more.

+

The dynamic version of our filter looks like this:

+
function(doc, req){
+  // we need only `mail` documents
+  if (doc.type != 'mail'){
+    return false;
+  }
+  // we're interested only in requested status
+  if (doc.status != req.query.status){
+    return false;
+  }
+  return true; // passed!
+}
+
+
+

and now we can pass the status query parameter in the request to let the filter match +only the required documents:

+
GET /somedatabase/_changes?filter=mailbox/by_status&status=new HTTP/1.1
+
+
{"results":[
+{"seq":1,"id":"df8eca9da37dade42ee4d7aa3401f1dd","changes":[{"rev":"1-c2e0085a21d34fa1cecb6dc26a4ae657"}]},
+{"seq":7,"id":"df8eca9da37dade42ee4d7aa34024714","changes":[{"rev":"1-29d748a6e87b43db967fe338bcb08d74"}]},
+],
+"last_seq":27}
+
+
+

and we can change the filter behavior with ease:

+
GET /somedatabase/_changes?filter=mailbox/by_status&status=spam HTTP/1.1
+
+
{"results":[
+{"seq":11,"id":"8960e91220798fc9f9d29d24ed612e0d","changes":[{"rev":"3-cc6ff71af716ddc2ba114967025c0ee0"}]},
+],
+"last_seq":27}
+
+
+

Combining filters with the continuous feed allows the creation of powerful event-driven +systems.

+
+
+

6.5.2. View filters

+

View filters are the same as above, with one small difference: they use a +view's map function instead of a filter function to process the changes feed. Each +time a key-value pair could be emitted, a change is returned. This helps +avoid creating filter functions that mostly do the same work as views.

+

To use them just specify the _view value for the filter parameter and +designdoc/viewname for the view parameter:

+
GET /somedatabase/_changes?filter=_view&view=dname/viewname  HTTP/1.1
+
+
+

Note

+

Since view filters use map functions as filters, they can't show any +dynamic behavior, since the request object is not +available.

+
+
+

See also

+
+
CouchDB Guide:
+
+
+
CouchDB Wiki:
+
+
+
+
+
+
+
+

6.6. Validate document update functions

+
+
+validatefun(newDoc, oldDoc, userCtx, secObj)
+
+++ + + + + + + + +
Arguments:
    +
  • newDoc – New version of document that will be stored.
  • +
  • oldDoc – Previous version of document that is already stored.
  • +
  • userCtxUser Context Object
  • +
  • secObjSecurity Object
  • +
+
Throws :

forbidden error to gracefully prevent document storing.

+
Throws :

unauthorized error to prevent storage and allow the user to +re-auth.

+
+
+ +

A design document may contain a function named validate_doc_update +which can be used to prevent invalid or unauthorized document update requests +from being stored. The function is passed the new document from the update +request, the current document stored in the database, a User Context Object +containing information about the user writing the document (if present), and +a Security Object with lists of database security roles.

+

Validation functions typically examine the structure of the new document to +ensure that required fields are present and to verify that the requesting user +should be allowed to make changes to the document properties. For example, +an application may require that a user must be authenticated in order to create +a new document or that specific document fields be present when a document +is updated. The validation function can abort the pending document write +by throwing one of two error objects:

+
// user is not authorized to make the change but may re-authenticate
+throw({ unauthorized: 'Error message here.' });
+
+// change is not allowed
+throw({ forbidden: 'Error message here.' });
+
+
+

Document validation is optional, and each design document in the database may +have at most one validation function. When a write request is received for +a given database, the validation function in each design document in that +database is called in an unspecified order. If any of the validation functions +throw an error, the write will not succeed.

+

Example: The _design/_auth ddoc from _users database uses a validation +function to ensure that documents contain some required fields and are only +modified by a user with the _admin role:

+
function(newDoc, oldDoc, userCtx, secObj) {
+    if (newDoc._deleted === true) {
+        // allow deletes by admins and matching users
+        // without checking the other fields
+        if ((userCtx.roles.indexOf('_admin') !== -1) ||
+            (userCtx.name == oldDoc.name)) {
+            return;
+        } else {
+            throw({forbidden: 'Only admins may delete other user docs.'});
+        }
+    }
+
+    if ((oldDoc && oldDoc.type !== 'user') || newDoc.type !== 'user') {
+        throw({forbidden : 'doc.type must be user'});
+    } // we only allow user docs for now
+
+    if (!newDoc.name) {
+        throw({forbidden: 'doc.name is required'});
+    }
+
+    if (!newDoc.roles) {
+        throw({forbidden: 'doc.roles must exist'});
+    }
+
+    if (!isArray(newDoc.roles)) {
+        throw({forbidden: 'doc.roles must be an array'});
+    }
+
+    if (newDoc._id !== ('org.couchdb.user:' + newDoc.name)) {
+        throw({
+            forbidden: 'Doc ID must be of the form org.couchdb.user:name'
+        });
+    }
+
+    if (oldDoc) { // validate all updates
+        if (oldDoc.name !== newDoc.name) {
+            throw({forbidden: 'Usernames can not be changed.'});
+        }
+    }
+
+    if (newDoc.password_sha && !newDoc.salt) {
+        throw({
+            forbidden: 'Users with password_sha must have a salt.' +
+                'See /_utils/script/couch.js for example code.'
+        });
+    }
+
+    var is_server_or_database_admin = function(userCtx, secObj) {
+        // see if the user is a server admin
+        if(userCtx.roles.indexOf('_admin') !== -1) {
+            return true; // a server admin
+        }
+
+        // see if the user a database admin specified by name
+        if(secObj && secObj.admins && secObj.admins.names) {
+            if(secObj.admins.names.indexOf(userCtx.name) !== -1) {
+                return true; // database admin
+            }
+        }
+
+        // see if the user a database admin specified by role
+        if(secObj && secObj.admins && secObj.admins.roles) {
+            var db_roles = secObj.admins.roles;
+            for(var idx = 0; idx < userCtx.roles.length; idx++) {
+                var user_role = userCtx.roles[idx];
+                if(db_roles.indexOf(user_role) !== -1) {
+                    return true; // role matches!
+                }
+            }
+        }
+
+        return false; // default to no admin
+    }
+
+    if (!is_server_or_database_admin(userCtx, secObj)) {
+        if (oldDoc) { // validate non-admin updates
+            if (userCtx.name !== newDoc.name) {
+                throw({
+                    forbidden: 'You may only update your own user document.'
+                });
+            }
+            // validate role updates
+            var oldRoles = oldDoc.roles.sort();
+            var newRoles = newDoc.roles.sort();
+
+            if (oldRoles.length !== newRoles.length) {
+                throw({forbidden: 'Only _admin may edit roles'});
+            }
+
+            for (var i = 0; i < oldRoles.length; i++) {
+                if (oldRoles[i] !== newRoles[i]) {
+                    throw({forbidden: 'Only _admin may edit roles'});
+                }
+            }
+        } else if (newDoc.roles.length > 0) {
+            throw({forbidden: 'Only _admin may set roles'});
+        }
+    }
+
+    // no system roles in users db
+    for (var i = 0; i < newDoc.roles.length; i++) {
+        if (newDoc.roles[i][0] === '_') {
+            throw({
+                forbidden:
+                'No system roles (starting with underscore) in users db.'
+            });
+        }
+    }
+
+    // no system names as names
+    if (newDoc.name[0] === '_') {
+        throw({forbidden: 'Username may not start with underscore.'});
+    }
+
+    var badUserNameChars = [':'];
+
+    for (var i = 0; i < badUserNameChars.length; i++) {
+        if (newDoc.name.indexOf(badUserNameChars[i]) >= 0) {
+            throw({forbidden: 'Character `' + badUserNameChars[i] +
+                    '` is not allowed in usernames.'});
+        }
+    }
+}
+
+
+
+

Note

+

The return statement is used only to exit the function; it has no impact on the validation process.

+
+
+

See also

+
+
CouchDB Guide:
+
+
+
CouchDB Wiki:
+
+
+
+
+
+
+ + +
+
+
+ +
+
+ + + + \ No newline at end of file diff -Nru couchdb-1.2.0/share/doc/build/html/genindex.html couchdb-1.4.0~rc.1/share/doc/build/html/genindex.html --- couchdb-1.2.0/share/doc/build/html/genindex.html 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/genindex.html 2013-08-23 10:59:02.000000000 -0400 @@ -0,0 +1,360 @@ + + + + + + + + + + + + Index — Apache CouchDB 1.4 Manual + + + + + + + + + + + + + + +
+
+
+
+ + +

Index

+ +
+ E + | F + | G + | I + | J + | L + | M + | P + | R + | S + | T + | U + | V + +
+

E

+ + + +
+ +
Emit() (built-in function) +
+ +
+ +
emit() (built-in function) +
+ +
+ +

F

+ + + +
+ +
filterfun() (built-in function) +
+ +
+ +
FoldRows() (built-in function) +
+ +
+ +

G

+ + + +
+ +
GetRow() (built-in function) +
+ +
+ +
getRow() (built-in function) +
+ +
+ +

I

+ + +
+ +
isArray() (built-in function) +
+ +
+ +

J

+ + +
+ +
JSON (global variable or constant) +
+ +
+ +

L

+ + + +
+ +
listfun() (built-in function) +
+ + +
Log() (built-in function) +
+ +
+ +
log() (built-in function) +
+ +
+ +

M

+ + +
+ +
mapfun() (built-in function) +
+ +
+ +

P

+ + +
+ +
provides() (built-in function) +
+ +
+ +

R

+ + + +
+ +
redfun() (built-in function) +
+ + +
registerType() (built-in function) +
+ +
+ +
require() (built-in function) +
+ +
+ +

S

+ + + +
+ +
send() (built-in function) +
+ + +
Send() (built-in function) +
+ + +
showfun() (built-in function) +
+ +
+ +
Start() (built-in function) +
+ + +
start() (built-in function) +
+ + +
sum() (built-in function) +
+ +
+ +

T

+ + +
+ +
toJSON() (built-in function) +
+ +
+ +

U

+ + +
+ +
updatefun() (built-in function) +
+ +
+ +

V

+ + +
+ +
validatefun() (built-in function) +
+ +
+ + + +
+
+
+
+
+ + + + + + + +

Utilities

+ + + + +

More Help

+ + +
+
+
+
+ + + + \ No newline at end of file Binary files /tmp/DjhtJwK2X2/couchdb-1.2.0/share/doc/build/html/_images/futon-createdb.png and /tmp/T3VxxJYoFO/couchdb-1.4.0~rc.1/share/doc/build/html/_images/futon-createdb.png differ Binary files /tmp/DjhtJwK2X2/couchdb-1.2.0/share/doc/build/html/_images/futon-editdoc.png and /tmp/T3VxxJYoFO/couchdb-1.4.0~rc.1/share/doc/build/html/_images/futon-editdoc.png differ Binary files /tmp/DjhtJwK2X2/couchdb-1.2.0/share/doc/build/html/_images/futon-editeddoc.png and /tmp/T3VxxJYoFO/couchdb-1.4.0~rc.1/share/doc/build/html/_images/futon-editeddoc.png differ Binary files /tmp/DjhtJwK2X2/couchdb-1.2.0/share/doc/build/html/_images/futon-overview.png and /tmp/T3VxxJYoFO/couchdb-1.4.0~rc.1/share/doc/build/html/_images/futon-overview.png differ Binary files /tmp/DjhtJwK2X2/couchdb-1.2.0/share/doc/build/html/_images/futon-replform.png and /tmp/T3VxxJYoFO/couchdb-1.4.0~rc.1/share/doc/build/html/_images/futon-replform.png differ diff -Nru couchdb-1.2.0/share/doc/build/html/index.html couchdb-1.4.0~rc.1/share/doc/build/html/index.html --- couchdb-1.2.0/share/doc/build/html/index.html 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/index.html 2013-08-23 10:58:59.000000000 -0400 @@ -0,0 +1,316 @@ + + + + + + + + + + Introduction — Apache CouchDB 1.4 Manual + + + + + + + + + + + + + + + +
+
+
+
+ +
+

Introduction

+

Apache CouchDB™ is a document database built for the web.

+

If you would like to help document the project, please send a note to the +developer mailing list.

+

This is a work in progress.

+
+
+

Contents

+
+ +
+
+ + +
+
+
+
+
+ + + + + +

Table Of Contents

+ + +

Next topic

+

1. Introduction

+ + +

Utilities

+ + + + +

More Help

+ + +
+
+
+
+ + + + \ No newline at end of file diff -Nru couchdb-1.2.0/share/doc/build/html/intro.html couchdb-1.4.0~rc.1/share/doc/build/html/intro.html --- couchdb-1.2.0/share/doc/build/html/intro.html 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/intro.html 2013-08-23 10:59:00.000000000 -0400 @@ -0,0 +1,423 @@ + + + + + + + + + + 1. Introduction — Apache CouchDB 1.4 Manual + + + + + + + + + + + + + + + + +
+
+
+
+ +
+

1. Introduction

+

There are two interfaces to CouchDB, the built-in Futon web-based +interface and the CouchDB API accessed through the HTTP REST interface. +The former is the simplest way to view and monitor your CouchDB +installation and perform a number of basic database and system +operations. More information on using the Futon interface can be found +in Using Futon.

+

The primary way to interact with the CouchDB API is to use a client +library or other interface that provides access to the underlying +functionality through your chosen language or platform. However, since +the API is supported through HTTP REST, you can interact with your +CouchDB with any solution that supports the HTTP protocol.

+

There are a number of different tools that talk the HTTP protocol and +allow you to set and configure the necessary information. One tool for +this that allows for access from the command-line is curl. See +Using curl.

+
+

1.1. Using Futon

+

Futon is a native web-based interface built into CouchDB. It provides a +basic interface to the majority of the functionality, including the +ability to create, update, delete and view documents and views, provides +access to the configuration parameters, and an interface for initiating +replication.

+

The default view is the Overview page which provides you with a list of +the databases. The basic structure of the page is consistent regardless +of the section you are in. The main panel on the left provides the main +interface to the databases, configuration or replication systems. The +side panel on the right provides navigation to the main areas of Futon +interface:

+
+Futon Overview +

Futon Overview

+
+

The main sections are:

+
    +
  • Overview

    +

    The main overview page, which provides a list of the databases and +provides the interface for querying the database and creating and +updating documents. See Managing Databases and Documents.

    +
  • +
  • Configuration

    +

    An interface into the configuration of your CouchDB installation. The +interface allows you to edit the different configurable parameters. +For more details on configuration, see Configuration.

    +
  • +
  • Replicator

    +

    An interface to the replication system, enabling you to initiate +replication between local and remote databases. See +Configuring Replication.

    +
  • +
  • Status

    +

    Displays a list of the running background tasks on the server. +Background tasks include view index building, compaction and +replication. The Status page is an interface to the +Active Tasks API call.

    +
  • +
  • Verify Installation

    +

    The Verify Installation allows you to check whether all of the +components of your CouchDB installation are correctly installed.

    +
  • +
  • Test Suite

    +

    The Test Suite section allows you to run the built-in test suite. +This executes a number of test routines entirely within your browser +to test the API and functionality of your CouchDB installation. If +you select this page, you can run the tests by using the Run All +button. This will execute all the tests, which may take some time.

    +
  • +
+
+

1.1.1. Managing Databases and Documents

+

You can manage databases and documents within Futon using the main +Overview section of the Futon interface.

+

To create a new database, click the Create Database … button. You +will be prompted for the database name, as shown in the figure below.

+
+Creating a Database +

Creating a Database

+
+

Once you have created the database (or selected an existing one), you +will be shown a list of the current documents. If you create a new +document, or select an existing document, you will be presented with the +edit document display.

+

Editing documents within Futon requires selecting the document and then +editing (and setting) the fields for the document individually before +saving the document back into the database.

+

For example, the figure below shows the editor for a single document, a +newly created document with a single ID, the document _id field.

+
+Editing a Document +

Editing a Document

+
+

To add a field to the document:

+
    +
  1. Click Add Field.

    +
  2. +
  3. In the fieldname box, enter the name of the field you want to create. +For example, “company”.

    +
  4. +
  5. Click the green tick next to the field name to confirm the field name +change.

    +
  6. +
  7. Double-click the corresponding Value cell.

    +
  8. +
  9. Enter a company name, for example “Example”.

    +
  10. +
  11. Click the green tick next to the field value to confirm the field +value.

    +
  12. +
  13. The document is still not saved at this point. You must explicitly +save the document by clicking the Save Document button at the top of +the page. This will save the document, and then display the new +document with the saved revision information (the _rev field).

    +
    +Edited Document +

    Edited Document

    +
    +
  14. +
+

The same basic interface is used for all editing operations within Futon. +You must remember to save the individual element (fieldname, value) +using the green tick button, before then saving the document.

+
+
+

1.1.2. Configuring Replication

+

When you click the Replicator option within the Tools menu you are +presented with the Replicator screen. This allows you to start +replication between two databases by filling in or selecting the +appropriate options within the form provided.

+
+Replication Form +

Replication Form

+
+

To start a replication process, either select the local database or +enter a remote database name into the corresponding areas of the form. +Replication occurs from the database on the left to the database on the +right.

+

If you are specifying a remote database name, you must specify the full +URL of the remote database (including the host, port number and database +name). If the remote instance requires authentication, you can specify +the username and password as part of the URL, for example +http://username:pass@remotehost:5984/demo.

+

To enable continuous replication, click the Continuous checkbox.

+

To start the replication process, click the Replicate button. The +replication process should start and will continue in the background. If +the replication process will take a long time, you can monitor the +status of the replication using the Status option under the Tools menu.

+

Once replication has been completed, the page will show the information +returned when the replication process completes by the API.

+

The Replicator tool is an interface to the underlying replication API. +For more information, see POST /_replicate. For more information on +replication, see Replication.

+
+
+
+

1.2. Using curl

+

The curl utility is a command line tool available on Unix, Linux, +Mac OS X and Windows and many other platforms. curl provides easy +access to the HTTP protocol (among others) directly from the +command-line and is therefore an ideal way of interacting with CouchDB +over the HTTP REST API.

+

For simple GET requests you can supply the URL of the request. For +example, to get the database information:

+
shell> curl http://127.0.0.1:5984
+
+
+

This returns the database information (formatted in the output below for +clarity):

+
{
+   "couchdb" : "Welcome",
+   "version" : "|version|",
+}
+
+
+
+

Note

+

For some URLs, especially those that include special characters such +as ampersand, exclamation mark, or question mark, you should quote +the URL you are specifying on the command line. For example:

+
shell> curl 'http://couchdb:5984/_uuids?count=5'
+
+
+
+

You can explicitly set the HTTP command using the -X command line +option. For example, when creating a database, you set the name of the +database in the URL you send using a PUT request:

+
shell> curl -X PUT http://127.0.0.1:5984/demo
+{"ok":true}
+
+
+

But to obtain the database information you use a GET request (with +the return information formatted for clarity):

+
shell> curl -X GET http://127.0.0.1:5984/demo
+{
+   "compact_running" : false,
+   "doc_count" : 0,
+   "db_name" : "demo",
+   "purge_seq" : 0,
+   "committed_update_seq" : 0,
+   "doc_del_count" : 0,
+   "disk_format_version" : 5,
+   "update_seq" : 0,
+   "instance_start_time" : "1306421773496000",
+   "disk_size" : 79
+}
+
+
+

For certain operations, you must specify the content type of request, +which you do by specifying the Content-Type header using the -H +command-line option:

+
shell> curl -H 'Content-Type: application/json' http://127.0.0.1:5984/_uuids
+
+
+

You can also submit ‘payload’ data, that is, data in the body of the +HTTP request using the -d option. This is useful if you need to +submit JSON structures, for example document data, as part of the +request. For example, to submit a simple document to the demo +database:

+
shell> curl -H 'Content-Type: application/json' \
+            -X POST http://127.0.0.1:5984/demo \
+            -d '{"company": "Example, Inc."}'
+{"ok":true,"id":"8843faaf0b831d364278331bc3001bd8",
+ "rev":"1-33b9fbce46930280dab37d672bbc8bb9"}
+
+
+

In the above example, the argument after the -d option is the JSON +of the document we want to submit.

+

The document can be accessed by using the automatically generated +document ID that was returned:

+
shell> curl -X GET http://127.0.0.1:5984/demo/8843faaf0b831d364278331bc3001bd8
+{"_id":"8843faaf0b831d364278331bc3001bd8",
+ "_rev":"1-33b9fbce46930280dab37d672bbc8bb9",
+ "company":"Example, Inc."}
+
+
+

The API samples in the API Basics show the HTTP command, URL and any +payload information that needs to be submitted (and the expected return +value). All of these examples can be reproduced using curl with the +command-line examples shown above.

+
+
+ + +
+
+
+
+
+ + + + + +

Table Of Contents

+ + +

Previous topic

+

Introduction

+

Next topic

+

2. API Basics

+ + +

Utilities

+ + + + +

More Help

+ + +
+
+
+
+ + + + \ No newline at end of file diff -Nru couchdb-1.2.0/share/doc/build/html/json-structure.html couchdb-1.4.0~rc.1/share/doc/build/html/json-structure.html --- couchdb-1.2.0/share/doc/build/html/json-structure.html 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/json-structure.html 2013-08-23 10:59:00.000000000 -0400 @@ -0,0 +1,1278 @@ + + + + + + + + + + 10. JSON Structure Reference — Apache CouchDB 1.4 Manual + + + + + + + + + + + + + + + + +
+
+
+
+ +
+

10. JSON Structure Reference

+

The following appendix provides a quick reference to all the JSON structures +that you can supply to CouchDB, or get in return to requests.

+
+

10.1. All Database Documents

+ ++++ + + + + + + + + + + + + + + + + + + + +
FieldDescription
total_rowsNumber of documents in the database/view
offsetOffset where the document list started
update_seq (optional)Current update sequence for the database
rows [array]Array of document object
+
+
+

10.2. Bulk Document Response

+ ++++ + + + + + + + + + + + + + + + + + + + +
FieldDescription
docs [array]Bulk Docs Returned Documents
idDocument ID
errorError type
reasonError string with extended reason
+
+
+

10.3. Bulk Documents

+ ++++ + + + + + + + + + + + + + + + + + + + + + + +
FieldDescription
all_or_nothing (optional)Sets the database commit mode to use +all-or-nothing semantics
docs [array]Bulk Documents Document
_id (optional)Document ID
_rev (optional)Revision ID (when updating an existing +document)
_deleted (optional)Whether the document should be deleted
+
+
+

10.4. Changes information for a database

+ ++++ + + + + + + + + + + + + + + + + + + + + + + +
FieldDescription
last_seqLast change sequence number
results [array]Changes made to a database
seqUpdate sequence number
idDocument ID
changes [array]List of changes, field-by-field, for this +document
+
+
+

10.5. CouchDB Document

+ ++++ + + + + + + + + + + + + + +
FieldDescription
_id (optional)Document ID
_rev (optional)Revision ID (when updating an existing +document)
+
+
+

10.6. CouchDB Error Status

+ ++++ + + + + + + + + + + + + + + + + +
FieldDescription
idDocument ID
errorError type
reasonError string with extended reason
+
+
+

10.7. CouchDB database information object

+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
FieldDescription
db_nameThe name of the database.
committed_update_seqThe number of committed update.
doc_countA count of the documents in the specified +database.
doc_del_countNumber of deleted documents
compact_runningSet to true if the database compaction +routine is operating on this database.
disk_format_versionThe version of the physical format used for +the data when it is stored on disk.
disk_sizeSize in bytes of the data as stored on the +disk. Views indexes are not included in the +calculation.
instance_start_timeTimestamp of when the database was opened, +expressed in microseconds since the epoch.
purge_seqThe number of purge operations on the +database.
update_seqThe current number of updates to the +database.
+
+
+

10.8. Design Document

+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
FieldDescription
_idDesign Document ID
_revDesign Document Revision
viewsView
viewnameView Definition
mapMap Function for View
reduce (optional)Reduce Function for View
+
+
+

10.9. Design Document Information

+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
FieldDescription
nameName/ID of Design Document
view_indexView Index
compact_runningIndicates whether a compaction routine is +currently running on the view
disk_sizeSize in bytes of the view as stored on disk
languageLanguage for the defined views
purge_seqThe purge sequence that has been processed
signatureMD5 signature of the views for the design +document
update_seqThe update sequence of the corresponding +database that has been indexed
updater_runningIndicates if the view is currently being +updated
waiting_clientsNumber of clients waiting on views from this +design document
waiting_commitIndicates if there are outstanding commits +to the underlying database that need to be +processed
+
+
+

10.10. Document with Attachments

+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
FieldDescription
_id (optional)Document ID
_rev (optional)Revision ID (when updating an existing +document)
_attachments (optional)Document Attachment
filenameAttachment information
content_typeMIME Content type string
dataFile attachment content, Base64 encoded
+
+
+

10.11. List of Active Tasks

+ ++++ + + + + + + + + + + + + + + + + + + + + + + +
FieldDescription
tasks [array]Active Task
pidProcess ID
statusTask status message
taskTask name
typeOperation Type
+
+
+

10.12. Replication Settings

+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
FieldDescription
sourceSource database name or URL
targetTarget database name or URL
create_target (optional)Creates the target database
continuous (optional)Configure the replication to be continuous
cancel (optional)Cancels the replication
doc_ids (optional)Array of document IDs to be synchronized
proxy (optional)Address of a proxy server through which +replication should occur
since_seq (optional)Sequence from which the replication should +start
filter (optional)name of the filter function in the form of +ddoc/myfilter
query_params (optional)query parameters that are passed to the +filter function; the value should be a document +containing parameters as members
+
+
+

10.13. Replication Status

+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
FieldDescription
okReplication status
session_idUnique session ID
source_last_seqLast sequence number read from source +database
history [array]Replication History
session_idSession ID for this replication operation
recorded_seqLast recorded sequence number
docs_readNumber of documents read
docs_writtenNumber of documents written to target
doc_write_failuresNumber of document write failures
start_timeDate/Time replication operation started
start_last_seqFirst sequence number in changes stream
end_timeDate/Time replication operation completed
end_last_seqLast sequence number in changes stream
missing_checkedNumber of missing documents checked
missing_foundNumber of missing documents found
+
+
+

10.14. Request object

+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
FieldDescription
bodyRequest body data as string. +If the request method is GET, this field +contains the value "undefined"; +if it is DELETE or HEAD, the value is "" +(empty string)
cookieCookies object.
formForm data object. +Contains decoded body as key-value pairs if +Content-Type header was +application/x-www-form-urlencoded.
headersRequest headers object.
idRequested document id string if it was +specified or null otherwise.
infoDatabase information
methodRequest method as string or array. +String value is method is one of: HEAD, +GET, POST, PUT, DELETE, OPTIONS, +and TRACE, otherwise it will be +represented as array of char codes.
pathList of requested path sections.
peerRequest source IP address.
queryURL query parameters object. +Note that multiple keys not supported and +last key value suppress others.
requested_pathList of actual requested path section.
raw_pathRaw requested path string.
secObjSecurity Object.
userCtxUser Context Object.
uuidGenerated UUID by specified algorithm in +config file.
+
{
+    "body": "undefined",
+    "cookie": {
+        "AuthSession": "cm9vdDo1MDZBRjQzRjrfcuikzPRfAn-EA37FmjyfM8G8Lw",
+        "m": "3234"
+    },
+    "form": {},
+    "headers": {
+        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
+        "Accept-Charset": "ISO-8859-1,utf-8;q=0.7,*;q=0.3",
+        "Accept-Encoding": "gzip,deflate,sdch",
+        "Accept-Language": "en-US,en;q=0.8",
+        "Connection": "keep-alive",
+        "Cookie": "m=3234:t|3247:t|6493:t|6967:t|34e2:|18c3:t|2c69:t|5acb:t|ca3:t|c01:t|5e55:t|77cb:t|2a03:t|1d98:t|47ba:t|64b8:t|4a01:t; AuthSession=cm9vdDo1MDZBRjQzRjrfcuikzPRfAn-EA37FmjyfM8G8Lw",
+        "Host": "127.0.0.1:5984",
+        "User-Agent": "Mozilla/5.0 (Windows NT 5.2) AppleWebKit/535.7 (KHTML, like Gecko) Chrome/16.0.912.75 Safari/535.7"
+    },
+    "id": "foo",
+    "info": {
+        "committed_update_seq": 2701412,
+        "compact_running": false,
+        "data_size": 7580843252,
+        "db_name": "mailbox",
+        "disk_format_version": 6,
+        "disk_size": 14325313673,
+        "doc_count": 2262757,
+        "doc_del_count": 560,
+        "instance_start_time": "1347601025628957",
+        "purge_seq": 0,
+        "update_seq": 2701412
+    },
+    "method": "GET",
+    "path": [
+        "mailbox",
+        "_design",
+        "request",
+        "_show",
+        "dump",
+        "foo"
+    ],
+    "peer": "127.0.0.1",
+    "query": {},
+    "raw_path": "/mailbox/_design/request/_show/dump/foo",
+    "requested_path": [
+        "mailbox",
+        "_design",
+        "request",
+        "_show",
+        "dump",
+        "foo"
+    ],
+    "secObj": {
+        "admins": {
+            "names": [
+                "Bob"
+            ],
+            "roles": []
+        },
+        "members": {
+            "names": [
+                "Mike",
+                "Alice"
+            ],
+            "roles": []
+        }
+    },
+    "userCtx": {
+        "db": "mailbox",
+        "name": "Mike",
+        "roles": [
+            "user"
+        ]
+    },
+    "uuid": "3184f9d1ea934e1f81a24c71bde5c168"
+}
+
+
+
+
+

10.15. Response object

+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
FieldDescription
codeHTTP status code number.
jsonJSON encodable object. +Implicitly sets Content-Type header as +application/json.
bodyRaw response text string. +Implicitly sets Content-Type header as +text/html; charset=utf-8.
base64Base64 encoded string. +Implicitly sets Content-Type header as +application/binary.
headersResponse headers object. +Content-Type header from this object +overrides any implicitly assigned one.
stopboolean signal to stop iteration over +view result rows (for list functions only)
+
+

Warning

+

The body, base64 and json object keys overlap each other, and +the last one wins. Since most implementations of key-value objects do not preserve +key order, mixing them may create a confusing situation. Try to use only one of +them.

+
+
+

Note

+

Any custom property makes CouchDB raise an internal exception. +Also, the Response object may be a simple string value, which is +implicitly wrapped into a {"body": ...} object.

+
+
+
+

10.16. Returned CouchDB Document with Detailed Revision Info

+ ++++ + + + + + + + + + + + + + + + + + + + + + + +
FieldDescription
_id (optional)Document ID
_rev (optional)Revision ID (when updating an existing +document)
_revs_info [array]CouchDB Document Extended Revision Info
revFull revision string
statusStatus of the revision
+
+
+

10.17. Returned CouchDB Document with Revision Info

+ ++++ + + + + + + + + + + + + + + + + + + + + + + +
FieldDescription
_id (optional)Document ID
_rev (optional)Revision ID (when updating an existing +document)
_revisionsCouchDB Document Revisions
ids [array]Array of valid revision IDs, in reverse +order (latest first)
startPrefix number for the latest revision
+
+
+

10.18. Returned Document with Attachments

+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
FieldDescription
_id (optional)Document ID
_rev (optional)Revision ID (when updating an existing +document)
_attachments (optional)Document Attachment
filenameAttachment
stubIndicates whether the attachment is a stub
content_typeMIME Content type string
lengthLength (bytes) of the attachment data
revposRevision where this attachment exists
+
+
+

10.19. Security Object

+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + +
FieldDescription
adminsRoles/Users with admin privileges
roles [array]List of roles with parent privilege
users [array]List of users with parent privilege
readersRoles/Users with reader privileges
roles [array]List of roles with parent privilege
users [array]List of users with parent privilege
+
{
+    "admins": {
+        "names": [
+            "Bob"
+        ],
+        "roles": []
+    },
+    "members": {
+        "names": [
+            "Mike",
+            "Alice"
+        ],
+        "roles": []
+    }
+  }
+
+
+
+
+

10.20. User Context Object

+ ++++ + + + + + + + + + + + + + + + + +
FieldDescription
dbDatabase name in context of provided +operation.
nameUser name.
rolesList of user roles.
+
{
+    "db": "mailbox",
+    "name": null,
+    "roles": [
+        "_admin"
+    ]
+}
+
+
+
+
+

10.21. View Head Information

+ ++++ + + + + + + + + + + + + + +
FieldDescription
total_rowsNumber of documents in the view
offsetOffset where the document list started
+
{
+    "total_rows": 42,
+    "offset": 3
+}
+
+
+
+
+

10.22. Number Handling

+

Any numbers defined in JSON that contain a decimal point or exponent +will be passed through the Erlang VM’s idea of the “double” data type. +Any numbers that are used in views will pass through the view’s idea of +a number (the common JavaScript case means even integers pass through +a double due to JavaScript’s definition of a number).

+

Consider this document that we write to CouchDB:

+
{
+  "_id":"30b3b38cdbd9e3a587de9b8122000cff",
+  "number": 1.1
+}
+
+
+

Now let’s read that document back from CouchDB:

+
{
+  "_id":"30b3b38cdbd9e3a587de9b8122000cff",
+  "_rev":"1-f065cee7c3fd93aa50f6c97acde93030",
+  "number":1.1000000000000000888
+}
+
+
+

What happens is CouchDB is changing the textual representation of the +result of decoding what it was given into some numerical format. In most +cases this is an IEEE 754 double precision floating point number which +is exactly what almost all other languages use as well.

+

What CouchDB does a bit differently than other languages is that it +does not attempt to pretty print the resulting output to use the +shortest number of characters. For instance, this is why we have this +relationship:

+
ejson:encode(ejson:decode(<<"1.1">>)).
+<<"1.1000000000000000888">>
+
+
+

What can be confusing here is that internally those two formats +decode into the same IEEE-754 representation. And more importantly, it +will decode into a fairly close representation when passed through all +major parsers that I know about.

+

While we’ve only been discussing cases where the textual +representation changes, another important case is when an input value +contains more precision than can actually be represented in a double. +(You could argue that this case is actually “losing” data if you don’t +accept that numbers are stored in doubles).

+

Here’s a log for a couple of the more common JSON libraries I happen +to have on my machine:

+

Spidermonkey:

+
$ js -h 2>&1 | head -n 1
+JavaScript-C 1.8.5 2011-03-31
+$ js
+js> JSON.stringify(JSON.parse("1.01234567890123456789012345678901234567890"))
+"1.0123456789012346"
+js> var f = JSON.stringify(JSON.parse("1.01234567890123456789012345678901234567890"))
+js> JSON.stringify(JSON.parse(f))
+"1.0123456789012346"
+
+

Node:

+
$ node -v
+v0.6.15
+$ node
+JSON.stringify(JSON.parse("1.01234567890123456789012345678901234567890"))
+'1.0123456789012346'
+var f = JSON.stringify(JSON.parse("1.01234567890123456789012345678901234567890"))
+undefined
+JSON.stringify(JSON.parse(f))
+'1.0123456789012346'
+
+

Python:

+
$ python
+Python 2.7.2 (default, Jun 20 2012, 16:23:33)
+[GCC 4.2.1 Compatible Apple Clang 4.0 (tags/Apple/clang-418.0.60)] on darwin
+Type "help", "copyright", "credits" or "license" for more information.
+import json
+json.dumps(json.loads("1.01234567890123456789012345678901234567890"))
+'1.0123456789012346'
+f = json.dumps(json.loads("1.01234567890123456789012345678901234567890"))
+json.dumps(json.loads(f))
+'1.0123456789012346'
+
+

Ruby:

+
$ irb --version
+irb 0.9.5(05/04/13)
+require 'JSON'
+=> true
+JSON.dump(JSON.load("[1.01234567890123456789012345678901234567890]"))
+=> "[1.01234567890123]"
+f = JSON.dump(JSON.load("[1.01234567890123456789012345678901234567890]"))
+=> "[1.01234567890123]"
+JSON.dump(JSON.load(f))
+=> "[1.01234567890123]"
+
+
+

Note

+

A small aside on Ruby, it requires a top level object or array, so I just +wrapped the value. Should be obvious it doesn’t affect the result of +parsing the number though.

+
+

Ejson (CouchDB’s current parser) at CouchDB sha 168a663b:

+
$ ./utils/run -i
+Erlang R14B04 (erts-5.8.5) [source] [64-bit] [smp:2:2] [rq:2]
+[async-threads:4] [hipe] [kernel-poll:true]
+
+Eshell V5.8.5  (abort with ^G)
+1> ejson:encode(ejson:decode(<<"1.01234567890123456789012345678901234567890">>)).
+<<"1.0123456789012346135">>
+2> F = ejson:encode(ejson:decode(<<"1.01234567890123456789012345678901234567890">>)).
+<<"1.0123456789012346135">>
+3> ejson:encode(ejson:decode(F)).
+<<"1.0123456789012346135">>
+
+

As you can see they all pretty much behave the same except for Ruby +actually does appear to be losing some precision over the other +libraries.

+

The astute observer will notice that ejson (the CouchDB JSON library) +reported an extra three digits. While it’s tempting to think that this +is due to some internal difference, it’s just a more specific case of +the 1.1 input as described above.

+

The important point to realize here is that a double can only hold a +finite number of values. What we’re doing here is generating a string +that when passed through the “standard” floating point parsing +algorithms (ie, strtod) will result in the same bit pattern in memory +as we started with. Or, slightly different, the bytes in a JSON +serialized number are chosen such that they refer to a single specific +value that a double can represent.

+

The important point to understand is that we’re mapping from one +infinite set onto a finite set. An easy way to see this is by +reflecting on this:

+
1.0 == 1.00 == 1.000 = 1.(infinite zeroes)
+
+

Obviously a computer can’t hold infinite bytes so we have to +decimate our infinitely sized set to a finite set that can be +represented concisely.

+

The game that other JSON libraries are playing is merely:

+

“How few characters do I have to use to select this specific value for a double”

+

And that game has lots and lots of subtle details that are difficult +to duplicate in C without a significant amount of effort (it took +Python over a year to get it sorted with their fancy build systems +that automatically run on a number of different architectures).

+

Hopefully we’ve shown that CouchDB is not doing anything “funky” by +changing input. It’s behaving the same as any other common JSON library +does, it’s just not pretty printing its output.

+

On the other hand, if you actually are in a position where an IEEE-754 +double is not a satisfactory datatype for your numbers, then the +answer as has been stated is to not pass your numbers through this +representation. In JSON this is accomplished by encoding them as a +string or by using integer types (although integer types can still +bite you if you use a platform that has a different integer +representation than normal, ie, JavaScript).

+

Also, if anyone is really interested in changing this behavior, I’m +all ears for contributions to jiffy (which is theoretically going to +replace ejson when I get around to updating the build system). The +places I’ve looked for inspiration are TCL and Python. If you know a +decent implementation of this float printing algorithm give me a +holler.

+
+
+ + +
+
+
+ +
+
+ + + + \ No newline at end of file Binary files /tmp/DjhtJwK2X2/couchdb-1.2.0/share/doc/build/html/objects.inv and /tmp/T3VxxJYoFO/couchdb-1.4.0~rc.1/share/doc/build/html/objects.inv differ diff -Nru couchdb-1.2.0/share/doc/build/html/query-servers.html couchdb-1.4.0~rc.1/share/doc/build/html/query-servers.html --- couchdb-1.2.0/share/doc/build/html/query-servers.html 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/query-servers.html 2013-08-23 10:59:01.000000000 -0400 @@ -0,0 +1,739 @@ + + + + + + + + + + 7. Query Servers — Apache CouchDB 1.4 Manual + + + + + + + + + + + + + + + + +
+
+
+
+ +
+

7. Query Servers

+
+

7.1. JavaScript

+
+

Note

+

While every design function has access to all JavaScript objects, +the table below describes appropriate usage cases. For example, +you may use emit() in List functions, but getRow() is not permitted during Map functions.

+
+ ++++ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
JS FunctionReasonable to use in design doc functions
emit()Map functions
getRow()List functions
JSONany
isArray()any
log()any
provides()Show functions, List functions
registerType()Show functions, List functions
require()any, except Reduce and rereduce functions
send()List functions
start()List functions
sum()any
toJSON()any
+
+

7.1.1. Design functions context

+

Each design function executes in a special context of predefined objects, +modules and functions:

+
+
+emit(key, value)
+

Emits a key-value pair for further processing by CouchDB after the map +function is done.

+ +++ + + + +
Arguments:
    +
  • key – The view key
  • +
  • value – The key‘s associated value
  • +
+
+
function(doc){
+  emit(doc._id, doc._rev);
+}
+
+
+
+ +
+
+getRow()
+

Extracts the next row from a related view result.

+ +++ + + + + + +
Returns:View result row
Return type:object
+
function(head, req){
+  send('[');
+  row = getRow();
+  if (row){
+    send(toJSON(row));
+    while(row = getRow()){
+      send(',');
+      send(toJSON(row));
+    }
+  }
+  return ']';
+}
+
+
+
+ +
+
+JSON
+

JSON2 +object.

+
+ +
+
+isArray(obj)
+

A helper function to check if the provided value is an Array.

+ +++ + + + + + + + +
Arguments:
    +
  • obj – Any Javascript value
  • +
+
Returns:

true if obj is Array-typed, false otherwise

+
Return type:

boolean

+
+
+ +
+
+log(message)
+

Log a message to the CouchDB log (at the INFO level).

+ +++ + + + +
Arguments:
    +
  • message – Message to be logged
  • +
+
+
function(doc){
+  log('Processing doc ' + doc['_id']);
+  emit(doc['_id'], null);
+}
+
+
+

After the map function has run, the following line can be found in CouchDB +logs (e.g. at /var/log/couchdb/couch.log):

+
[Sat, 03 Nov 2012 17:38:02 GMT] [info] [<0.7543.0>] OS Process #Port<0.3289> Log :: Processing doc 8d300b86622d67953d102165dbe99467
+
+
+
+ +
+
+provides(key, func)
+

Registers callable handler for specified MIME key.

+ +++ + + + +
Arguments:
    +
  • key – MIME key previously defined by registerType()
  • +
  • func – MIME type handler
  • +
+
+
+ +
+
+registerType(key, *mimes)
+

Registers list of MIME types by associated key.

+ +++ + + + +
Arguments:
    +
  • key – MIME types
  • +
  • mimes – MIME types enumeration
  • +
+
+

Predefined mappings (key-array):

+
    +
  • all: */*
  • +
  • text: text/plain; charset=utf-8, txt
  • +
  • html: text/html; charset=utf-8
  • +
  • xhtml: application/xhtml+xml, xhtml
  • +
  • xml: application/xml, text/xml, application/x-xml
  • +
  • js: text/javascript, application/javascript, +application/x-javascript
  • +
  • css: text/css
  • +
  • ics: text/calendar
  • +
  • csv: text/csv
  • +
  • rss: application/rss+xml
  • +
  • atom: application/atom+xml
  • +
  • yaml: application/x-yaml, text/yaml
  • +
  • multipart_form: multipart/form-data
  • +
  • url_encoded_form: application/x-www-form-urlencoded
  • +
  • json: application/json, text/x-json
  • +
+
+ +
+
+require(path)
+

Loads CommonJS module by a specified path. The path should not start with +a slash.

+ +++ + + + + + +
Arguments:
    +
  • path – A CommonJS module path started from design document root
  • +
+
Returns:

Exported statements

+
+
+ +
+
+send(chunk)
+

Sends a single string chunk in response.

+ +++ + + + +
Arguments:
    +
  • chunk – Text chunk
  • +
+
+
function(head, req){
+  send('Hello,');
+  send(' ');
+  send('Couch');
+  return '!';
+}
+
+
+
+ +
+
+start(init_resp)
+

Initiates chunked response. As an option, a custom +response object may be sent at this point. +For list-functions only!

+
+

Note

+

list functions may set the HTTP response code and headers by calling +this function. This function must be called before send(), +getRow() or a return statement; otherwise, the query server will +implicitly call this function with the empty object ({}).

+
+
function(head, req){
+  start({
+    "code": 302,
+    "headers": {
+      "Location": "http://couchdb.apache.org"
+    }
+  });
+  return "Relax!";
+}
+
+
+
+ +
+
+sum(arr)
+

Sum arr‘s items.

+ +++ + + + + + +
Arguments:
    +
  • arr – Array of numbers
  • +
+
Return type:

number

+
+
+ +
+
+toJSON(obj)
+

Encodes obj to JSON string. This is an alias for the JSON.stringify +method.

+ +++ + + + + + +
Arguments:
    +
  • obj – JSON encodable object
  • +
+
Returns:

JSON string

+
+
+ +
+
+

7.1.2. CommonJS Modules

+

Support for CommonJS Modules +(introduced in CouchDB 0.11.0) allows you to create modular design functions +without the need for duplication of functionality.

+

Here’s a CommonJS module that checks user permissions:

+
function user_context(userctx, secobj) {
+  var is_admin = function() {
+    return userctx['roles'].indexOf('_admin') != -1;
+  }
+  return {'is_admin': is_admin}
+}
+
+exports['user'] = user_context
+
+
+

Each module has access to additional global variables:

+
    +
  • module (object): Contains information about the stored module
      +
    • id (string): The module id; a JSON path in ddoc context
    • +
    • current (code): Compiled module code object
    • +
    • parent (object): Parent frame
    • +
    • exports (object): Export statements
    • +
    +
  • +
  • exports (object): Shortcut to the module.exports object
  • +
+

The CommonJS module can be added to a design document, like so:

+
{
+   "views": {
+      "lib": {
+         "security": "function user_context(userctx, secobj) { ... }"
+      },
+      "validate_doc_update": "function(newdoc, olddoc, userctx, secobj) {
+        user = require('lib/security').user(userctx, secobj);
+        return user.is_admin();
+      }"
+   },
+   "_id": "_design/test"
+}
+
+
+

Modules paths are relative to the design document’s views object, but +modules can only be loaded from the object referenced via lib. The +lib structure can still be used for view functions as well, by simply +storing view functions at e.g. views.lib.map, views.lib.reduce, etc.

+
+
+
+

7.2. Erlang

+
+

Warning

+

Unlike the JavaScript query server, the Erlang query server does not +run in a sandbox. This means Erlang code has full access to your OS, +filesystem and network, which may lead to security issues. While Erlang +functions are faster than JavaScript ones, you need to be careful +about running them, especially if they were written by someone else.

+
+
+

Note

+

Due to security restrictions, the Erlang query server is disabled by +default. To enable it, you need to edit your local.ini to include a +native_query_servers section:

+
[native_query_servers]
+erlang = {couch_native_process, start_link, []}
+
+
+

Don’t forget to restart CouchDB after updating the configuration, and +use the language: "erlang" property in your Erlang design documents.

+
+
+
+Emit(Id, Value)
+

Emits key-value pairs to view indexer process.

+
fun({Doc}) ->
+  <<K,_/binary>> = proplists:get_value(<<"_rev">>, Doc, null),
+  V = proplists:get_value(<<"_id">>, Doc, null),
+  Emit(<<K>>, V)
+end.
+
+
+
+ +
+
+FoldRows(Fun, Acc)
+

Helper to iterate over all rows in a list function.

+ +++ + + + +
Arguments:
    +
  • Fun – Function object.
  • +
  • Acc – The value previously returned by Fun.
  • +
+
+
fun(Head, {Req}) ->
+  Fun = fun({Row}, Acc) ->
+    Id = couch_util:get_value(<<"id">>, Row),
+    Send(list_to_binary(io_lib:format("Previous doc id: ~p~n", [Acc]))),
+    Send(list_to_binary(io_lib:format("Current  doc id: ~p~n", [Id]))),
+    {ok, Id}
+  end,
+  FoldRows(Fun, nil),
+  ""
+end.
+
+
+
+ +
+
+GetRow()
+

Retrieves the next row from a related view result.

+
%% FoldRows background implementation.
+%% https://git-wip-us.apache.org/repos/asf?p=couchdb.git;a=blob;f=src/couchdb/couch_native_process.erl;hb=HEAD#l368
+%%
+foldrows(GetRow, ProcRow, Acc) ->
+  case GetRow() of
+    nil ->
+      {ok, Acc};
+    Row ->
+      case (catch ProcRow(Row, Acc)) of
+        {ok, Acc2} ->
+          foldrows(GetRow, ProcRow, Acc2);
+        {stop, Acc2} ->
+          {ok, Acc2}
+      end
+end.
+
+
+
+ +
+
+Log(Msg)
+
+++ + + + +
Arguments:
    +
  • Msg – Log a message at the INFO level.
  • +
+
+
fun({Doc}) ->
+  <<K,_/binary>> = proplists:get_value(<<"_rev">>, Doc, null),
+  V = proplists:get_value(<<"_id">>, Doc, null),
+  Log(lists:flatten(io_lib:format("Hello from ~s doc!", [V]))),
+  Emit(<<K>>, V)
+end.
+
+
+

After the map function has run, the following line can be found in +CouchDB logs (e.g. at /var/log/couchdb/couch.log):

+
[Sun, 04 Nov 2012 11:33:58 GMT] [info] [<0.9144.2>] Hello from 8d300b86622d67953d102165dbe99467 doc!
+
+
+
+ +
+
+Send(Chunk)
+

Sends a single string Chunk in response.

+
fun(Head, {Req}) ->
+  Send("Hello,"),
+  Send(" "),
+  Send("Couch"),
+  "!"
+end.
+
+
+

The function above produces the following response:

+
Hello, Couch!
+
+
+
+ +
+
+Start(Headers)
+
+++ + + + +
Arguments: +
+

Initialize List functions response. At this point, response code and headers +may be defined. For example, this function redirects to the CouchDB web site:

+
fun(Head, {Req}) ->
+  Start({[{<<"code">>, 302},
+          {<<"headers">>, {[
+            {<<"Location">>, <<"http://couchdb.apache.org">>}]
+          }}
+        ]}),
+  "Relax!"
+end.
+
+
+
+ +
+
+ + +
+
+
+
+
+ + + + + +

Table Of Contents

+ + +

Previous topic

+

6. Design Docs

+

Next topic

+

8. Changes Feed

+ + +

Utilities

+ + + + +

More Help

+ + +
+
+
+
+ + + + \ No newline at end of file diff -Nru couchdb-1.2.0/share/doc/build/html/replication.html couchdb-1.4.0~rc.1/share/doc/build/html/replication.html --- couchdb-1.2.0/share/doc/build/html/replication.html 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/replication.html 2013-08-23 10:59:01.000000000 -0400 @@ -0,0 +1,247 @@ + + + + + + + + + + 4. Replication — Apache CouchDB 1.4 Manual + + + + + + + + + + + + + + + + +
+
+
+
+ +
+

4. Replication

+

One of CouchDB’s strengths is the ability to synchronize two copies of the same +database. This enables users to distribute data across several nodes or +datacenters, but also to move data closer to clients.

+

Replication involves a source and a destination database, which can be on the +same or on different CouchDB instances. The aim of the replication is that at +the end of the process, all active documents on the source database are also in +the destination database and all documents that were deleted in the source +databases are also deleted on the destination database (if they even existed).

+
+

4.1. Triggering Replication

+

Replication is controlled through documents in the Replicator Database, where +each document describes one replication process (see +Replication Settings).

+

A replication is triggered by storing a replication document in the replicator +database. Its status can be inspected through the active tasks API (see +GET /_active_tasks and Replication Status). A replication can be +stopped by deleting the document, or by updating it with its cancel property +set to true.

+
+
+

4.2. Replication Procedure

+

During replication, CouchDB will compare the source and the destination +database to determine which documents differ between the source and the +destination database. It does so by following the Changes Feed on the source +and comparing the documents to the destination. Changes are submitted to the +destination in batches where they can introduce conflicts. Documents that +already exist on the destination in the same revision are not transferred. As +the deletion of documents is represented by a new revision, a document deleted +on the source will also be deleted on the target.

+

A replication task will finish once it reaches the end of the changes feed. If +its continuous property is set to true, it will wait for new changes to +appear until the task is cancelled. Replication tasks also create checkpoint +documents on the destination to ensure that a restarted task can continue from +where it stopped, for example after it has crashed.

+

When a replication task is initiated on the sending node, it is called push +replication, if it is initiated by the receiving node, it is called pull +replication.

+
+
+

4.3. Master - Master replication

+

One replication task will only transfer changes in one direction. To achieve +master-master replication it is possible to set up two replication tasks in +different directions. When a change is replicated from database A to B by the +first task, the second will discover that the new change on B already exists in +A and will wait for further changes.

+
+
+

4.4. Controlling which Documents to Replicate

+

There are two ways for controlling which documents are replicated, and which +are skipped. Local documents are never replicated (see Local (non-replicating) Document Methods).

+

Additionally, Filter functions can be used in replication documents (see +Replication Settings). The replication task will then evaluate +the filter function for each document in the changes feed. The document will +only be replicated if the filter returns true.

+
+
+

4.5. Migrating Data to Clients

+

Replication can be especially useful for bringing data closer to clients. +PouchDB implements the replication algorithm of CouchDB +in JavaScript, making it possible to make data from a CouchDB database +available in an offline browser application, and synchronize changes back to +CouchDB.

+
+
+ + +
+
+
+
+
+ + + + + +

Table Of Contents

+ + +

Previous topic

+

3. Configuration

+

Next topic

+

5. Replicator Database

+ + +

Utilities

+ + + + +

More Help

+ + +
+
+
+
+ + + + \ No newline at end of file diff -Nru couchdb-1.2.0/share/doc/build/html/replicator.html couchdb-1.4.0~rc.1/share/doc/build/html/replicator.html --- couchdb-1.2.0/share/doc/build/html/replicator.html 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/replicator.html 2013-08-23 10:59:02.000000000 -0400 @@ -0,0 +1,503 @@ + + + + + + + + + + 5. Replicator Database — Apache CouchDB 1.4 Manual + + + + + + + + + + + + + + + + +
+
+
+
+ +
+

5. Replicator Database

+

A database where you PUT/POST documents to trigger replications +and you DELETE to cancel ongoing replications. These documents have +exactly the same content as the JSON objects we used to POST to +_replicate (fields source, target, create_target, +continuous, doc_ids, filter, query_params).

+

Replication documents can have a user defined _id. Design documents +(and _local documents) added to the replicator database are ignored.

+

The default name of this database is _replicator. The name can be +changed in the local.ini configuration, section [replicator], +parameter db.

+
+

5.1. Basics

+

Let’s say you PUT the following document into _replicator:

+
{
+    "_id": "my_rep",
+    "source":  "http://myserver.com:5984/foo",
+    "target":  "bar",
+    "create_target":  true
+}
+
+
+

In the couch log you’ll see 2 entries like these:

+
[Thu, 17 Feb 2011 19:43:59 GMT] [info] [<0.291.0>] Document `my_rep` triggered replication `c0ebe9256695ff083347cbf95f93e280+create_target`
+[Thu, 17 Feb 2011 19:44:37 GMT] [info] [<0.124.0>] Replication `c0ebe9256695ff083347cbf95f93e280+create_target` finished (triggered by document `my_rep`)
+
+
+

As soon as the replication is triggered, the document will be updated by +CouchDB with 3 new fields:

+
{
+    "_id": "my_rep",
+    "source":  "http://myserver.com:5984/foo",
+    "target":  "bar",
+    "create_target":  true,
+    "_replication_id":  "c0ebe9256695ff083347cbf95f93e280",
+    "_replication_state":  "triggered",
+    "_replication_state_time":  1297974122
+}
+
+
+

Special fields set by the replicator start with the prefix +_replication_.

+
    +
  • _replication_id

    +

    The ID internally assigned to the replication. This is also the ID +exposed by /_active_tasks.

    +
  • +
  • _replication_state

    +

    The current state of the replication.

    +
  • +
  • _replication_state_time

    +

    A Unix timestamp (number of seconds since 1 Jan 1970) that tells us +when the current replication state (marked in _replication_state) +was set.

    +
  • +
+

When the replication finishes, it will update the _replication_state +field (and _replication_state_time) with the value completed, so +the document will look like:

+
{
+    "_id": "my_rep",
+    "source":  "http://myserver.com:5984/foo",
+    "target":  "bar",
+    "create_target":  true,
+    "_replication_id":  "c0ebe9256695ff083347cbf95f93e280",
+    "_replication_state":  "completed",
+    "_replication_state_time":  1297974122
+}
+
+
+

When an error happens during replication, the _replication_state +field is set to error (and _replication_state_time gets updated of +course).

+

When you PUT/POST a document to the _replicator database, CouchDB +will attempt to start the replication up to 10 times (configurable under +[replicator], parameter max_replication_retry_count). If it +fails on the first attempt, it waits 5 seconds before doing a second +attempt. If the second attempt fails, it waits 10 seconds before doing a +third attempt. If the third attempt fails, it waits 20 seconds before +doing a fourth attempt (each attempt doubles the previous wait period). +When an attempt fails, the Couch log will show you something like:

+
[error] [<0.149.0>] Error starting replication `67c1bb92010e7abe35d7d629635f18b6+create_target` (document `my_rep_2`): {db_not_found,<<"could not open http://myserver:5986/foo/">>
+
+
+
+

Note

+

The _replication_state field is only set to error when all +the attempts were unsuccessful.

+
+

There are only 3 possible values for the _replication_state field: +triggered, completed and error. Continuous replications +never get their state set to completed.

+
+
+

5.2. Documents describing the same replication

+

Lets suppose 2 documents are added to the _replicator database in +the following order:

+
{
+    "_id": "doc_A",
+    "source":  "http://myserver.com:5984/foo",
+    "target":  "bar"
+}
+
+
+

and

+
{
+    "_id": "doc_B",
+    "source":  "http://myserver.com:5984/foo",
+    "target":  "bar"
+}
+
+
+

Both describe exactly the same replication (only their _ids differ). +In this case document doc_A triggers the replication, getting +updated by CouchDB with the fields _replication_state, +_replication_state_time and _replication_id, just like it was +described before. Document doc_B however, is only updated with one +field, the _replication_id so it will look like this:

+
{
+    "_id": "doc_B",
+    "source":  "http://myserver.com:5984/foo",
+    "target":  "bar",
+    "_replication_id":  "c0ebe9256695ff083347cbf95f93e280"
+}
+
+
+

While document doc_A will look like this:

+
{
+    "_id": "doc_A",
+    "source":  "http://myserver.com:5984/foo",
+    "target":  "bar",
+    "_replication_id":  "c0ebe9256695ff083347cbf95f93e280",
+    "_replication_state":  "triggered",
+    "_replication_state_time":  1297974122
+}
+
+
+

Note that both documents get exactly the same value for the +_replication_id field. This way you can identify which documents +refer to the same replication - you can for example define a view which +maps replication IDs to document IDs.

+
+
+

5.3. Canceling replications

+

To cancel a replication simply DELETE the document which triggered +the replication. The Couch log will show you an entry like the +following:

+
[Thu, 17 Feb 2011 20:16:29 GMT] [info] [<0.125.0>] Stopped replication `c0ebe9256695ff083347cbf95f93e280+continuous+create_target` because replication document `doc_A` was deleted
+
+
+
+

Note

+

You need to DELETE the document that triggered the replication. +DELETE-ing another document that describes the same replication +but did not trigger it, will not cancel the replication.

+
+
+
+

5.4. Server restart

+

When CouchDB is restarted, it checks its _replicator database and +restarts any replication that is described by a document that either has +its _replication_state field set to triggered or it doesn’t have +yet the _replication_state field set.

+
+

Note

+

Continuous replications always have a _replication_state field +with the value triggered, therefore they’re always restarted +when CouchDB is restarted.

+
+
+
+

5.5. Changing the Replicator Database

+

Imagine your replicator database (default name is _replicator) has the +two following documents that represent pull replications from servers A +and B:

+
{
+    "_id": "rep_from_A",
+    "source":  "http://aserver.com:5984/foo",
+    "target":  "foo_a",
+    "continuous":  true,
+    "_replication_id":  "c0ebe9256695ff083347cbf95f93e280",
+    "_replication_state":  "triggered",
+    "_replication_state_time":  1297971311
+}
+
+
+
{
+    "_id": "rep_from_B",
+    "source":  "http://bserver.com:5984/foo",
+    "target":  "foo_b",
+    "continuous":  true,
+    "_replication_id":  "231bb3cf9d48314eaa8d48a9170570d1",
+    "_replication_state":  "triggered",
+    "_replication_state_time":  1297974122
+}
+
+
+

Now without stopping and restarting CouchDB, you change the name of the +replicator database to another_replicator_db:

+
$ curl -X PUT http://localhost:5984/_config/replicator/db -d '"another_replicator_db"'
+"_replicator"
+
+
+

As soon as this is done, both pull replications defined before, are +stopped. This is explicitly mentioned in CouchDB’s log:

+
[Fri, 11 Mar 2011 07:44:20 GMT] [info] [<0.104.0>] Stopping all ongoing replications because the replicator database was deleted or changed
+[Fri, 11 Mar 2011 07:44:20 GMT] [info] [<0.127.0>] 127.0.0.1 - - PUT /_config/replicator/db 200
+
+
+

Imagine now you add a replication document to the new replicator +database named another_replicator_db:

+
{
+    "_id": "rep_from_X",
+    "source":  "http://xserver.com:5984/foo",
+    "target":  "foo_x",
+    "continuous":  true
+}
+
+
+

From now on, you have a single replication going on in your system: a +pull replication pulling from server X. Now you change back the +replicator database to the original one _replicator:

+
$ curl -X PUT http://localhost:5984/_config/replicator/db -d '"_replicator"'
+"another_replicator_db"
+
+

Immediately after this operation, the replication pulling from server X +will be stopped and the replications defined in the _replicator +database (pulling from servers A and B) will be resumed.

+

Changing again the replicator database to another_replicator_db will +stop the pull replications pulling from servers A and B, and resume the +pull replication pulling from server X.

+
+
+

5.6. Replicating the replicator database

+

Imagine you have in server C a replicator database with the two +following pull replication documents in it:

+
{
+     "_id": "rep_from_A",
+     "source":  "http://aserver.com:5984/foo",
+     "target":  "foo_a",
+     "continuous":  true,
+     "_replication_id":  "c0ebe9256695ff083347cbf95f93e280",
+     "_replication_state":  "triggered",
+     "_replication_state_time":  1297971311
+}
+
+
+
{
+     "_id": "rep_from_B",
+     "source":  "http://bserver.com:5984/foo",
+     "target":  "foo_b",
+     "continuous":  true,
+     "_replication_id":  "231bb3cf9d48314eaa8d48a9170570d1",
+     "_replication_state":  "triggered",
+     "_replication_state_time":  1297974122
+}
+
+
+

Now you would like to have the same pull replications going on in server +D, that is, you would like to have server D pull replicating from +servers A and B. You have two options:

+
    +
  • Explicitly add two documents to server’s D replicator database
  • +
  • Replicate server’s C replicator database into server’s D replicator +database
  • +
+

Both alternatives accomplish exactly the same goal.

+
+
+

5.7. Delegations

+

Replication documents can have a custom user_ctx property. This +property defines the user context under which a replication runs. For +the old way of triggering replications (POSTing to /_replicate/), +this property was not needed (it didn’t exist in fact) - this is because +at the moment of triggering the replication it has information about the +authenticated user. With the replicator database, since it’s a regular +database, the information about the authenticated user is only present +at the moment the replication document is written to the database - the +replicator database implementation is like a _changes feed consumer +(with ?include_docs=true) that reacts to what was written to the +replicator database - in fact this feature could be implemented with an +external script/program. This implementation detail implies that for non +admin users, a user_ctx property, containing the user’s name and a +subset of his/her roles, must be defined in the replication document. +This is ensured by the document update validation function present in +the default design document of the replicator database. This validation +function also ensures that a non admin user cannot set a user name property +in the user_ctx property that doesn’t match his/her own name (same +principle applies for the roles).

+

For admins, the user_ctx property is optional, and if it’s missing +it defaults to a user context with name null and an empty list of roles +- this means design documents will not be written to local targets. If +writing design documents to local targets is desired, a user context +with the role _admin must be set explicitly.

+

Also, for admins the user_ctx property can be used to trigger a +replication on behalf of another user. This is the user context that +will be passed to local target database document validation functions.

+
+

Note

+

The user_ctx property only has effect for local endpoints.

+
+

Example delegated replication document:

+
{
+     "_id": "my_rep",
+     "source":  "http://bserver.com:5984/foo",
+     "target":  "bar",
+     "continuous":  true,
+     "user_ctx": {
+          "name": "joe",
+          "roles": ["erlanger", "researcher"]
+     }
+}
+
+
+

As stated before, for admins the user_ctx property is optional, while +for regular (non admin) users it’s mandatory. When the roles property of +user_ctx is missing, it defaults to the empty list [ ].

+
+
+ + +
+
+
+ +
+
+ + + + \ No newline at end of file diff -Nru couchdb-1.2.0/share/doc/build/html/search.html couchdb-1.4.0~rc.1/share/doc/build/html/search.html --- couchdb-1.2.0/share/doc/build/html/search.html 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/search.html 2013-08-23 10:59:02.000000000 -0400 @@ -0,0 +1,177 @@ + + + + + + + + + + Search — Apache CouchDB 1.4 Manual + + + + + + + + + + + + + + + + + + +
+
+
+
+ +

Search

+
+ +

+ Please activate JavaScript to enable the search + functionality. +

+
+

+ From here you can search these documents. Enter your search + words into the box below and click "search". Note that the search + function will automatically search for all of the words. Pages + containing fewer words won't appear in the result list. +

+
+ + + +
+ +
+ +
+ +
+
+
+
+
+ + + + + + + +

Utilities

+ + + + +

More Help

+ + +
+
+
+
+ + + + \ No newline at end of file diff -Nru couchdb-1.2.0/share/doc/build/html/searchindex.js couchdb-1.4.0~rc.1/share/doc/build/html/searchindex.js --- couchdb-1.2.0/share/doc/build/html/searchindex.js 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/searchindex.js 2013-08-23 10:59:02.000000000 -0400 @@ -0,0 +1 @@ +Search.setIndex({objects:{"":{isArray:[4,0,1,""],log:[4,0,1,""],registerType:[4,0,1,""],provides:[4,0,1,""],validatefun:[3,0,1,""],send:[4,0,1,""],emit:[4,0,1,""],GetRow:[4,0,1,""],Emit:[4,0,1,""],start:[4,0,1,""],JSON:[4,1,1,""],getRow:[4,0,1,""],FoldRows:[4,0,1,""],sum:[4,0,1,""],Send:[4,0,1,""],redfun:[3,0,1,""],toJSON:[4,0,1,""],Start:[4,0,1,""],require:[4,0,1,""],Log:[4,0,1,""]}},terms:{"231bb3cf9d48314eaa8d48a9170570d1":20,"7a05370bff53186cb5d403f861aca154":6,prefix:[13,15,20,5,10],sleep:10,utc_id:[13,10],bike:10,under:[5,14,10,11,19,20],aux:11,digit:[8,13,15],everi:[8,3,4,10],risk:13,bind_address:[17,18,5],affect:[8,15,6],eshel:15,viewabl:17,upload:[13,10,14,6],zucchiniinagrodolcesweet:6,verif:5,c01:15,zlib:10,coucdb:2,max_http_sess:[17,18],slp:8,"04cfa71e0deabd_i_am_in_yer_couch":13,direct:[10,12,2,5,6],batch:[12,13,8,10],second:[12,2,3,20],even:[3,5,14,10,8,6,12,15,16],couchdb_opt:10,include_sasl:[17,18],clenaup:10,r15b:10,neg:10,zesti:6,asid:15,"7156254d":10,"new":[3,5,14,10,7,8,6,11,12,13,19,18,16,20],metadata:10,"2d2c7d1e":10,ongo:20,elimin:[10,6],behavior:[15,8,3],reduce_limit:[17,18,3],never:[12,8,20],here:[4,3,5,14,8,15,16],path:[0,4,2,5,14,6,7,9,10,13,15,18,8],interpret:[5,10],precis:15,credit:15,permit:[4,10],aka:10,portabl:10,db_not_found:[2,20],skippabl:14,view_index:[15,6],unix:[11,19,20,5],"_delet":[15,8,3,10],txt:[14,5,4],unit:2,highli:14,describ:[1,2,4,9,11,12,15,8,20],would:[1,5,14,10,6,11,13,15,17,8,20],ef9ac469:10,program:[20,10],call:[4,3,5,14,10,6,12,13,19,18,8],recommend:[14,5],type:[4,2,3,5,14,6,7,8,10,13,19,15,18,16],until:[12,8,2,16],unescap:10,relax:4,relat:[14,3,5,4,10],notic:[11,15,10],h
urt:11,warn:[14,15,5,4,10],validate_doc_upd:[3,4,10],instance_start_tim:[8,19,15,10],hold:[7,15],origin:[1,2,5,14,10,8,11,13,17,16,20],must:[4,2,3,5,14,6,7,10,13,19,18,8,20],ert:15,join:3,restor:10,couch_stats_aggreg:18,setup:[5,10],work:[1,2,3,5,14,10,16],os_process_timeout:[17,18],xhr:10,server_kei:5,authentication_redirect:[17,18],root:[2,4],overrid:[16,15,5,10],give:[11,15],standpi:6,british:6,want:[2,5,14,10,7,11,19,8],end:[2,4,10,6,12,8],quot:[2,3,14,10,19,8],zingi:6,how:[3,14,10,13,15,16],answer:15,verifi:[19,3,5,10],config:[15,3,10],"3db559f13a845c7751d407404cdeaa4a":6,"3bbb2612":10,updat:[1,4,2,3,5,14,6,8,10,12,13,19,15,18,16,20],couch_httpd_misc_handl:18,recogn:10,lai:10,x509:5,earlier:[14,5],couch_query_serv:18,befor:[3,4,10,8,6,13,19,18,16,20],wrong:10,all_doc:[16,10],couch_httpd:[18,5,10],demonstr:3,attempt:[15,20,10],third:[3,20],bootstrap:[11,10],credenti:[17,2,5],lost:8,receiv:[12,8,2,3],maintain:5,environ:[18,14,10],incorpor:13,enter:19,exclus:17,order:[3,5,6,8,13,15,16,20],missing_found:[2,15],ibrows:10,over:[4,5,10,19,15,16],orang:6,becaus:[8,20,14,6],request_token:0,privileg:[2,6,7,13,15,18,8],oven:6,erjkmgmb8xucaaaaasuvork5cyii:3,flexibl:[5,10],refs_info:13,cli:10,fix:10,avocado:6,better:[11,14,10],persist:10,erlang:[1,4,3,5,14,10,11,13,15,18,20],descend:[8,10,16,6],them:[3,4,10,8,11,15,16],anim:10,httpd_db_handler:[17,1,18],a1a9b39ee3cc39181b796a69cb48521c:13,thei:[4,2,3,5,14,6,8,10,11,12,13,15,16,20],fragment:9,authdb:2,safe:11,sumsqr:3,"break":10,db_name:[8,19,15],promis:11,bread:[13,8],meat:6,dumpl:8,accommod:8,localis:5,timeout:[2,10,16,17,18,8],each:[4,2,3,5,14,6,8,10,12,13,15,16,20],debug:3,"8d300b86622d67953d102165dbe99467":4,side:[19,3,14,5,10],mean:[2,3,4,14,10,15,8,20],resum:[20,14],sdch:15,logo:3,extract:[4,6],network:[2,5,4],goe:10,newli:[19,10],smp:[15,10],content:[1,2,3,5,14,6,10,13,19,15,18,8,20],rewrit:[10,1,9,5,6],adapt:10,reader:[8,15,10],view_index_dir:[17,18],got:[11,3],ejson:[11,15],gov:5,navig:[19,10],written:[2,4,13,15,8,
20],situat:[15,13,2,3,6],infin:10,free:[11,10,6],standard:[5,14,6,7,13,15,8],eec205a9d413992850a6e32678485900:16,recipelist:6,"22egg":6,f755c413badf66b22941313f9f001e28:13,md5:[10,15,14,6],precompil:3,rep_from_x:20,ccb7igzvbnqtc2l6ztogmtjwddsgfqo:13,"8d7ab8b1":10,traceback:10,openssl:5,f3b15bb408961f8dcc3d86c7d3b54c4c:6,filter:[1,3,10,8,6,12,15,16,20],pagin:10,isn:[3,5],rep_from_b:20,rep_from_a:20,subtl:15,onto:[15,3],chainabl:10,updatenam:[9,6],rang:[1,2,3,14,10,6],render:10,clariti:[19,8,10],restrict:[8,16,5,4],unlik:[13,4,6],alreadi:[2,3,5,10,12,13,8],wasn:10,massiv:10,primari:[19,3,14],hood:14,spinach:6,top:[2,5,10,11,19,15,8],sometim:10,stack:[3,5,10],master:[11,1,12,10],too:3,stats_collector:[17,18],listen:[16,5],foo_a:20,foo_b:20,incub:10,signup:10,took:15,"29d748a6e87b43db967fe338bcb08d74":3,foo_x:20,db_role:3,adob:10,rereduc:[3,4],target:[2,3,5,10,6,12,13,15,20],keyword:6,provid:[0,4,2,3,5,14,6,7,8,10,13,19,15,18,16],tree:[11,13,10],rate:[17,13,18],"final":[11,5],project:[1,5],"5ur3rebn":3,handle_doc_update_req:18,runner:10,mind:3,mine:11,raw:[2,5,10,6,13,15],increment:[13,10,6],seen:[10,6],seem:3,incompat:10,df8eca9da37dade42ee4d7aa34024714:3,strength:12,recreat:6,unresolv:10,latter:17,especi:[12,19,14,4,6],terrin:6,contact:2,transmit:[5,10],expens:[8,10],simplifi:16,though:[15,14],usernam:[19,3],object:[10,1,4,2,3,5,14,6,8,9,15,18,16,20],what:[3,5,14,13,15,20],microsecond:[8,15],letter:8,bsd:10,everyth:[5,10],tradit:6,"_config":[2,5,10,9,13,17,18,20],don:[3,4,5,10,11,15,8],dom:[16,10],doc:[10,1,4,2,3,5,14,6,7,9,11,13,15,8],"_all_db":[17,18,2,9],doe:[4,2,3,5,14,6,8,12,13,15,16],bracket:14,wildcard:[5,10],scotch:6,section:[10,2,3,4,5,6,8,9,19,15,17,18,16,20],c9df0cdf4442f993fc5570225b405a80:2,whitelist:[17,5,10],random:[13,2,18],syntax:10,buildbot:10,identifi:[2,5,14,10,9,8,20],"272d6415":10,involv:12,shellfish:6,menu:19,configur:[10,1,4,2,5,14,6,7,9,11,19,15,17,18,16,20],apach:[11,1,4,10],bust:10,rich:6,oct:2,stop:[2,4,5,6,8,12,15,16,20],report:[15,8,3,14,
10],reconstruct:6,bar:20,ietf:5,method:[0,1,4,2,5,14,6,7,9,10,12,13,15,17,18,8],twice:8,bad:[2,14,10],dfc5d37c:10,respond:[13,5],"_secur":[9,8],datatyp:15,mandatori:20,result:[10,2,3,4,14,6,8,9,13,15,16],linefe:8,fail:[2,5,14,10,16,8,20],key_fil:5,best:[8,3],subject:[16,10],hopefulli:15,databas:[10,1,2,3,5,14,6,7,8,9,12,13,19,15,17,16,20],waiting_cli:[15,6],update_notif:6,figur:[19,10],outstand:[15,6],simplest:[19,14],yogurt:6,irb:15,raw_path:15,couch_httpd_oauth:[18,10],attribut:[16,10],accord:[9,6],extend:[8,13,10,15,6],pointer:5,garlicmayonnais:[8,6],"_temp_view":[17,18,9,8],doc_del_count:[8,19,15],upfront:5,toler:10,update_aft:[10,6],kitchen:6,"_replication_st":20,protect:[5,10],cor:[17,1,16,5,10],easi:[3,5,14,6,11,19,15],cow:6,howev:[19,3,20,6],against:[10,3,6],logic:3,login:[0,10],browser:[5,14,10,12,19,16],com:[11,17,20,5],compromis:5,raita:6,theoret:15,default_authentication_handl:18,guid:[3,14],assum:[3,14,10],summar:8,duplic:[15,4,10],haxx:5,beef:8,chrome:[15,10],fri:[13,20,5],three:[15,8,3,14,6],been:[2,3,5,14,6,7,10,13,19,15,8],much:[15,3,10],total_row:[8,15,6],basic:[1,2,3,5,14,6,8,10,13,19,16,20],vpath:10,server_cert:5,suppress:15,argument:[2,3,4,5,6,7,10,13,19,8],"_log":[10,17,18,2,9],"catch":[4,10],viewnam:[15,3,6],ident:[7,2,8,6],servic:[17,5],properti:[3,4,10,12,15,17,16,20],lessen:6,aim:[12,2],calcul:[8,15,10],"04cfa718b00848_i_am_in_yer_couch":13,publicli:10,occas:6,"43febce5675468a5467fb5467ce9e6c0":13,timefram:10,tabl:[2,3,4,10,6,11,13,8],new_mail:3,wgyj7iy9jaa:3,need:[4,2,3,5,14,6,8,10,11,19,15,16,20],conf:10,max_replication_retry_count:[20,10],sever:[12,3,10],"7d418134":10,incorrectli:10,perform:[2,3,5,14,10,13,19,8],suggest:14,make:[3,5,14,10,8,6,11,12,13,15,16],couchdb:[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20],complex:10,split:[5,10],complet:[2,5,14,10,6,19,15,18,8,20],nil:4,hang:10,hand:[8,15],fairli:15,rais:[15,3,10],garlic:6,ownership:10,pread_iolist:10,redfun:3,kept:8,foldrow:4,thu:[2,20,14],scone:8,"91635098bfe7d40197a1b98
d7ee085fc":8,client:[1,2,3,5,14,6,8,10,12,19,15,18,16],thi:[10,1,4,2,3,5,14,6,8,9,11,12,13,19,15,18,16,20],gzip:[15,10],alabama:6,"_doc_id":[8,10],left:19,tiersalmonspinachandavocadoterrin:6,"_missing_rev":[9,8],protocol:[17,19,5,10],just:[3,5,14,10,8,6,11,13,15,18,16,20],"_admin":[15,3,20,4],yet:[11,20],languag:[4,14,10,6,19,15],previous:[2,3,4],query_param:[15,20,10],xmlhttprequest:[5,10],expos:[10,20,6],"_db_updat":[2,9],had:[3,14],els:[3,4],expon:15,applic:[4,2,3,5,14,6,8,10,12,13,19,15,18,16],which:[1,4,2,3,5,14,6,8,10,11,12,13,19,15,18,16,20],preserv:[15,10],background:[4,5,14,19,17,8],"2921c78":10,handle_view_req:18,aberffraw:6,daemon:[1,3,5,10,17,18],specif:[2,3,5,14,6,7,8,10,13,15,17,18,16],arbitrari:[8,5,10],manual:[10,8,6],tcp_nodelai:[17,10],unnecessari:[16,10],underli:[19,15,6],www:[18,15,5,4,10],right:[11,8,19,3,14],old:[10,8,20,5,6],cuisin:6,"56744f2f":10,handle_log_req:18,dead:16,intern:[2,3,14,10,15,8,20],flatten:4,successfulli:[2,14,10,7,6,13,18,8],ico:[17,18,2,9],is_server_or_database_admin:3,bottom:11,normal:[2,14,10,8,15,16],changes_timeout:16,track:[13,8,6],suffici:5,condit:10,foo:[15,20],requested_path:[15,10],localhost:[20,5],design_doc:8,core:10,bold:6,peel:6,compress:[8,10],insecur:5,"917d8988":10,repositori:10,peer:15,post:[0,2,3,5,14,6,9,10,11,13,19,15,8,20],chapter:11,obj:4,slightli:[8,15,10],bcbdf724f8544c89697a1cbc4b9f0178:6,doc_count:[8,19,15],"2774531ff2":10,commit:[3,10,7,6,11,13,15,8],produc:[8,3,4,10],"float":[15,14,10],encod:[3,4,14,10,6,11,13,15],bound:10,down:10,"7c4740b4dcf26683e941d6641c00c39d":13,resili:10,wrap:15,subl:11,storag:[11,8,3,10],microwav:[8,6],accordingli:10,git:[11,4,10],wai:[3,5,14,6,11,12,19,15,8,20],handle_design_info_req:18,"1d98":15,support:[1,4,2,3,5,14,6,7,8,10,13,19,15,16],transform:3,"class":10,avail:[0,2,3,5,14,6,7,10,12,13,19,18,8],janl:11,reli:8,editor:[11,19,10],interv:8,fork:11,head:[1,4,2,3,5,14,10,9,11,13,15],creation:10,genrsa:5,offer:10,forc:[10,6],listfun:3,"true":[4,2,3,5,14,6,7,8,10,12,13,19
,15,17,18,16,20],"throw":[8,3],open_databas:2,flavour:6,start_kei:10,zingylemontart:[8,6],maximum:[8,2,16,10],tell:[11,16,20],endkey_docid:[8,6],independ:[8,10],ol1ka:2,emit:[3,4,10,8,6,16],wmh:3,ddoc:[15,3,4,10],featur:[3,5,14,10,16,20],cb2b7d94eeac76782a02396ba70dfbf5:6,r14b04:15,r14b03:10,r14b02:10,lveliy253na3pvhqqdpt0f:3,exist:[2,3,5,10,6,12,13,19,15,8,20],erlang_oauth:10,check:[2,3,4,10,19,15,8,20],password:[2,5,10,19,17,8],encrypt:5,when:[2,3,5,14,6,7,10,12,13,19,15,17,8,20],refactor:10,flood:10,role:[15,8,3,20,10],test:[4,5,14,10,11,19],committed_update_seq:[8,19,15,10],node:[12,15],scotchcollop:6,notif:[1,3,5,6,16,8],auth_cache_s:[17,18],diet:6,consid:[15,10],easili:[2,14],"_count":3,faster:[3,4,10],end_last_seq:[2,15],potato:6,pbkdf2:[5,10],r16b:10,carriag:8,pathnam:5,time:[2,3,5,10,8,6,11,13,19,15,16,20],c37204b7:10,couch_batch_sav:10,spidermonkei:[11,15,10],skip:[11,12,8,16,6],global:[17,4,10],cacert:5,signific:[8,15,14],last_seq:[15,8,3,16],claret:6,computation:8,row:[3,4,10,8,6,15,16],millisecond:[8,2,16],delayed_commit:[17,18],depend:[8,5,10],flash:10,readabl:[5,10],sourc:[1,2,10,6,11,12,13,15,16,20],string:[4,2,3,5,14,6,7,8,10,13,15,18,16],revalid:[13,14,5],cook:[14,6],exact:8,r16b01:10,administr:[1,2,14,5,10],level:[2,4,5,10,6,15,17,18,8],did:20,khtml:15,iter:[17,15,3,4],item:[16,14,4],unsupport:14,icu:10,quick:[15,14,10],div:6,gentlewoman:6,prevent:[16,8,3,10],sign:[3,5],native_query_serv:4,bake:8,port:[4,5,10,19,17,18],appear:[12,15,16,10],"55b6a1b251902a2c249b667dab1c6692":[13,6],current:[2,3,4,5,6,7,8,10,11,13,19,15,18,16,20],ampersand:19,yogurtraita:[8,6],corn:6,update_seq:[15,8,10,19,6],deriv:17,dropdown:10,honour:2,gener:[2,3,14,10,9,6,13,19,15,17,18,8],unauthor:[2,3,14],"_fti":5,satisfi:14,handle_favicon_req:18,slow:10,modif:[8,10],address:[2,15,5,10],along:[11,10],wait:[3,6,16,12,15,8,20],box:19,"_oauth":[0,17,18],c307ba95:10,checksum:10,bot:10,queue:10,throughput:10,behav:15,bob:15,reclaim:[8,10],couch_httpd_view:18,semant:[7,8,15,10],rega
rdless:19,extra:[15,10],modul:[3,4,10],instal:[11,19,5,10],mobil:2,regex:5,httpd:[1,2,5,17,18,16],memori:[13,15,10],ca3:15,univers:2,todai:5,perl:[8,14],live:11,handler:[1,3,4,5,10,17],msg:4,reorder:10,"_googl":5,snappy_nif:10,oom:10,peopl:10,httpd_design_handl:[17,1,18,10],finit:15,futon:[1,2,19,5,10],appendix:15,"_sum":[3,10],indic:[2,3,14,10,6,13,15,18,8],examin:[8,3],start_key_doc_id:10,effort:15,easiest:14,d076976c:10,my_rep:20,prepar:10,uniqu:[13,2,15,5],cat:14,descriptor:[2,10],sour:6,can:[4,2,3,5,14,6,7,8,10,11,12,13,19,15,16,20],citru:6,purpos:[17,8,3,10],c9df0cdf4442f993fc5570225b406a20:2,overwritten:5,by_recip:6,favourit:11,stream:[16,2,15,14,10],backslash:[14,10],agent:[8,15,10],ssl3_get_server_certif:5,"2057b895":10,abort:[15,3],start_link:[18,5,4],"77cb":15,"9f53704a":10,alwai:[16,13,3,20,10],multipl:[5,14,10,6,15,8],charset:[13,15,14,4],ping:5,ingredtext:6,write:[2,3,10,6,11,13,15,8,20],purg:[8,10,15,6],foreach:3,fourth:20,"72ea7e38":10,xhtml:[15,4],flang:10,map:[4,3,5,14,10,8,6,15,16,20],product:[5,6],mar:20,book:[16,6],max:[2,3],clone:11,mac:[19,10],mai:[4,2,3,5,14,6,10,19,15,17,18,8],hob:6,underscor:[9,3,5,10],data:[0,1,2,3,4,5,6,16,8,10,12,13,14,15,19],newlin:[2,8,10],repo:4,divid:2,explicit:[14,6],inform:[0,1,2,3,4,5,6,7,8,10,13,14,15,16,19,20],"switch":10,preced:16,combin:[8,3,14,6],maxsequ:16,callabl:4,clients_requesting_chang:2,talk:19,a19a1a5ecd946dad70e85233ba039ab2:13,"_rev":[3,4,6,13,19,15,8],ttl:5,gitignor:11,still:[8,19,15,4,10],authentication_db:[17,18],dynam:[2,3,14,5],entiti:[9,3],group:[1,2,3,14,10,6,17,8],monitor:[8,10,19,6],concis:15,polici:5,"07a6af222":10,precondit:[2,14],"0d50103cfd":10,platform:[19,15,10],window:[11,16,19,15,10],fb670f5712:10,mail:[11,1,3],updater_run:[15,6],main:[3,14,10,6,11,19,18,8],non:[1,3,10,7,9,6,12,13,8,20],"5xx":10,safari:[15,10],"7e4b5a14b22ec1cf8e58b9cdd0000da3":2,nov:[13,5,4],now:[3,5,14,10,11,15,16,20],discuss:15,nor:5,possess:5,term:[5,10],handle_stats_req:18,name:[2,3,5,10,9,6,13,19,15,18,8,20],m
ochijson2:10,adukiandorangecasserol:[8,6],revers:[13,10,15,6],couch_httpd_auth:[17,1,18,10],crypto:10,separ:[2,5,14,10,9,13,17,8],waiting_commit:[15,6],millisec:16,compil:[3,4],by_statu:3,domain:5,replac:[15,3,5,10],individu:[2,3,5,6,8,9,13,19,17,18,16],continu:[1,2,3,10,8,12,19,15,16,20],significantli:[10,6],year:15,happen:[2,5,10,15,16,20],shown:[2,10,13,19,15,8],yawx64odeh47bhiv:3,space:[10,8,6],"857c7cbeb6c8dd1dd34a0c73e8da3c44":8,urlencod:[15,4],start_last_seq:[2,15],internet:10,correct:[8,14,10],user_rol:3,migrat:[1,12,10],argu:15,request_tim:2,headlin:11,aioli:[8,6],ajax:[5,10],mime:[4,14,10,6,13,15],org:[2,3,4,5,10,11],"byte":[2,5,14,10,6,13,15,8],care:[14,3,5,4,10],funki:15,couldn:10,"34c318924a8f327223eed702ddfdc66d":8,synchron:[2,5,10,12,13,15,8],refus:10,recov:6,thing:[16,14,10],place:[11,8,15,5],bdd3bf3563bee516b96885a66c743f8:8,principl:20,think:15,frequent:[16,10],first:[2,3,5,14,10,6,11,12,13,15,16,20],oper:[2,5,14,10,6,19,15,17,8,20],handle_session_req:18,"_restart":[10,17,18,2,9],directli:[19,14,5],kernel:15,onc:[12,2,19,5,10],arrai:[2,3,4,14,6,8,13,15,18,16],yourself:5,docs_written:[2,15],submit:[14,10,6,12,13,19,8],oppos:11,open:[2,10,8,11,15,16,20],predefin:4,size:[8,13,10,15,6],given:[3,6,7,8,13,15,16],christma:6,workaround:5,bookmark:10,associ:[4,14,6,16,13,8],bite:15,callout:10,main_onli:16,conveni:6,friend:3,c98ba561:10,uc3bjpef:3,cope:5,b58f069167:10,copi:[2,14,10,7,9,6,11,12,13],specifi:[4,2,3,5,14,6,7,8,10,13,19,15,18,16],"short":[10,6],enclos:14,mostli:[11,3],date:[13,2,15,14,5],than:[3,4,10,8,6,13,15,16],png:3,acc2:4,serv:[5,14,10,6,13,8],wide:10,were:[2,4,10,8,6,12,13,16,20],posit:[15,10],surrog:10,mysecretpassword:5,seri:10,pre:[3,10],sai:[3,20,14],"_local":[7,9,20],ani:[4,3,5,14,10,8,13,19,15,16,20],collat:[2,10],sat:4,commonj:[3,4,10],engin:[11,10],squar:14,advic:10,alias:10,portenti:10,ce48342:10,disk_format_vers:[8,19,15],note:[1,4,2,3,5,14,6,8,10,11,13,19,15,17,18,16,20],other:[10,2,3,5,14,6,7,8,9,11,13,19,15,17,16],ideal:19,take
:[3,14,10,8,19,16],green:[13,19],noth:[15,8,3,16],begin:8,sure:[3,5,14,10,8,16],importantli:15,trace:[15,10],couch_stats_collector:18,buffer:10,handle_welcome_req:18,buffet:6,r13b02:10,pair:[3,4,14,10,15,18],icon:2,renam:10,textarea:10,egg:6,seafood:6,later:[11,3,10],quantiti:[2,8],runtim:10,"825cb35de44c433bfb2df415563a19d":16,link:10,fishstew99:13,salt:[3,5],gracefulli:3,multilingu:10,shot:3,show:[10,1,3,4,14,6,9,13,19,20],database_writ:2,concurr:10,permiss:[4,10],"561bf6b1e27615cee83d1f48fa65dd3":13,line:[2,4,5,10,8,6,13,19,16],xml:[15,18,3,4,10],onli:[4,2,3,5,14,6,8,10,12,13,15,16,20],explicitli:[8,19,16,20,14],transact:8,activ:[1,2,3,10,12,19,15],behind:5,cc6ff71af716ddc2ba114967025c0ee0:3,newdoc:[3,4],c9df0cdf4442f993fc5570225b405bd2:2,offici:5,external_manag:[17,18],zestyseafoodavocado:6,nearli:14,variou:[18,3,14,10],get:[0,2,3,5,14,6,7,8,9,10,11,12,13,19,15,18,16,20],w3aaaag0leqvr4xp3lrq4dqrbd0qqtm4y5:3,clang:15,zabaglion:6,ssl:[17,1,5,10],cannot:[7,13,8,14,10],utf:[2,3,4,14,10,6,13,15],requir:[4,2,3,5,14,6,7,8,10,13,19,15,17,18,16],dramat:10,isarrai:[3,4,10],where:[2,5,14,10,6,11,12,13,15,8,20],summari:13,wiki:3,couchdb_pid_fil:10,users_db_publ:17,picnic:6,handle_compact_req:18,proce:2,seal:[3,10],calendar:4,ieee:15,infinit:15,detect:10,enumer:4,enough:10,r15b02:5,between:[2,5,14,10,6,12,19,8],"import":[2,15,14,10],across:12,spars:10,parent:[8,13,15,4,6],screen:[19,5,10],rare:10,tum:16,come:[10,6],uuid:[1,2,3,10,13,15,17,18],resubmit:8,substituion:10,tutori:[11,5],improv:[13,10],among:[19,10],overview:19,inspir:[11,15],period:[8,16,20],b59ac98b:10,colon:14,handle_view_list_req:18,poll:[1,8,15,16],caret:10,erl:4,coupl:15,myserv:20,rebuild:[10,8,6],classic:[3,6],mark:[11,19,8,20],rebuilt:[8,6],"84elywv":3,sslcert:5,resolut:[8,10],rubi:[15,14],wake:10,c9df0cdf4442f993fc5570225b405e42:2,"7f7638c86173eb440b8890839ff35433":8,former:[19,16],those:[19,15,14,10],sound:11,alabamapeanutchicken:6,access_token:0,plugin:10,invok:[3,10],public_field:17,invoc:10,ac58d589b3
7d01c00f45a4418c5a15a8:6,advantag:14,stdout:10,couch_uuid:18,henc:2,worri:11,destin:[2,5,6,7,12,13],strain:16,uncom:5,myapp:8,b90e4021:10,ascii:11,handle_utils_dir_req:18,develop:[11,1,5,10],author:[0,11,3,14,5],alphabet:10,"7051cbe5c8faecd085a3fa619e6e6337":16,intermediari:5,same:[1,2,3,5,14,6,8,10,12,13,19,15,17,16,20],couch_httpd_show:18,epoch:[8,13,15],html:[4,3,5,14,10,11,15,18],pai:5,document:[10,1,4,2,3,5,14,6,7,8,9,11,12,13,19,15,17,16,20],status:[3,14],exhaust:10,ifram:10,finish:[12,20],nest:[3,10],confidenti:10,"47ba":15,someon:4,getrow:[3,4],driven:3,capabl:14,a6eaf9f1:10,qbqn2agp:3,mani:[3,5,14,10,19,16],extern:[20,5,10],designdoc:[16,3],appropri:[17,19,5,4],markup:11,without:[4,2,3,5,14,10,15,8,20],compression_level:[17,18],execut:[4,5,10,6,19,8],excel:[3,6],gavin:10,tolowercas:3,nicer:3,bea76dbf:10,speed:6,startkei:[10,8,6],sndbuf:5,"2a74f88375":10,tim:8,view_manag:[17,18],trigger:[1,3,14,10,6,12,20],except:[7,8,15,4,10],littl:[11,16,14],r13b:[13,14],blob:[4,10],vulner:10,httpsd:[17,5],real:[16,14],around:[15,10],libm:10,read:[2,5,14,10,13,15],arriv:5,olddoc:[3,4],categoris:14,"573a7bb9":10,world:[17,3],reap:10,init_resp:4,aberffrawcak:[8,6],b1a049bb:10,integ:[8,15,14,10],server:[1,4,2,3,5,14,10,8,9,13,19,15,17,18,16,20],either:[2,3,5,14,6,8,13,19,16,20],output:[5,10,7,6,19,15,8],manag:[19,5,10],mochiweb:[1,2,5,10],ascend:10,cancel:[1,2,10,12,15,20],satisfactori:15,adequ:5,io_lib:4,aadubae9eb3iebz:3,e7af4c4e9981d960ecf78605d79b06d1:8,"0c619ed":10,b213e16f:10,confirm:[19,8],definit:[5,14,10,6,13,15,8],achiev:[12,8,6],"2xx":10,notabl:10,refer:[1,5,14,10,8,9,11,15,17,16,20],wheat:6,roast:13,power:3,inspect:12,filterfun:3,broken:10,found:[4,2,5,14,10,7,8,13,19,15,16],newrol:3,e896b0b7:10,acc:4,src:[11,4],deflat:[15,10],oldrol:3,"_view":[3,14,10,9,6,17,18,16],seqnum:[16,10],meal:6,chop:6,lucen:5,stand:[11,6],act:[3,10],luck:11,backup:10,e4x:10,routin:[5,10,6,19,15,8],effici:[16,10,3,6],slash:[9,5,4,10],recbuf:5,your:[4,2,3,5,14,6,8,10,11,19,15,16,20],log:[1
,2,4,10,15,17,18,20],her:20,area:[9,19,14],aren:10,hex:13,json:[10,1,4,2,3,5,14,6,7,8,9,11,13,19,15,18,16,20],data_s:[15,10],"967a00dff5e02add41819138abb3284d":16,interfac:[0,2,5,14,6,7,10,19,18,8],lot:[15,3,10],ipv6:10,submiss:8,wholem:8,handle_request:18,monoton:13,verbatim:5,tupl:5,bundl:[5,10],regard:10,jun:15,"_session":[0,17,18,2,10],replicator_db:10,longer:[3,10],pull:[11,12,2,20,10],possibl:[3,5,14,10,12,20],"default":[2,3,4,5,10,8,6,13,19,15,16,20],filternam:[8,16],f913ca6e:10,somedatabas:[16,3],embed:[13,10],deadlock:10,expect:[19,14,10],creat:[4,2,3,5,14,6,7,10,11,12,13,19,15,8],onerror:16,certain:[19,8,14],deep:3,procrow:4,file:[1,2,3,5,14,6,10,11,13,15,17,18,8],fill:[11,19],sourcourgett:6,again:[3,20],couchdb_ini_fil:10,googl:[5,10],event:[1,2,3,10,8,13,16],field:[10,2,3,5,14,6,9,13,19,15,17,8,20],valid:[1,2,3,5,14,6,7,10,11,13,15,18,8,20],preptim:6,ignor:[11,20,10],you:[10,1,4,2,3,5,14,6,7,8,9,11,13,19,15,18,16,20],juic:6,sequenc:[2,10,8,6,15,16],disk_siz:[8,19,15,6],push:[11,12,2,16,10],etap:10,concat:3,reduc:[3,4,14,10,6,15,8],bulk:[1,2,14,10,15,8],"2e0dbf7f6c4ad716f21938a016e4e59f":2,directori:[11,5,10],descript:[0,2,14,10,7,6,11,13,15,17,18,8],session_id:[2,15],"247bb19a41bfd9bfdaf5ee6e2e05be74":13,tricki:5,ad700014:10,potenti:10,escap:[3,14,10],invoic:2,degrad:3,togeth:[13,2,6],represent:[11,15,3,10],all:[10,1,4,2,3,5,14,6,8,9,11,12,19,15,17,18,16,20],dist:10,forget:[4,10],groundwork:10,forbidden:[8,2,3,14,5],scalar:8,mp3:14,abil:[12,19,10],dish:6,follow:[4,2,5,14,6,7,8,10,11,12,13,15,16,20],disk:[8,13,10,15,6],dairi:6,tail:2,no_db_fil:14,ensure_full_commit:10,queri:[1,4,2,3,5,14,6,7,8,10,13,19,15,17,18,16],introduc:[12,4,10],"case":[4,2,3,5,14,6,8,10,11,13,15,16,20],r14:10,consum:[16,20,5],straightforward:5,ingredi:[8,6],fals:[4,3,5,14,10,8,6,13,19,15,18,16],default_handl:[17,18],offlin:12,util:[11,19,15,10],mechan:[3,14],failur:[2,10,13,15,18,8],veri:[8,3,5,6],ticket:5,lib64:18,create_target:[2,15,20],"8960e91220798fc9f9d29d24ed612e0d":3,list:[0
,1,2,3,4,5,6,7,8,9,10,11,13,14,15,16,17,18,19,20],adjust:[5,10],form:[4,3,5,14,10,8,19,15,16],stderr:10,loadscript:10,small:[15,3,10],"_revis":[13,15],content_typ:[13,15,10],"04cfa405381205204f75100d0241ccc3":13,tee:16,past:11,zero:15,design:[10,1,4,3,5,14,6,8,9,15,17,16,20],pass:[3,5,10,8,19,15,16,20],further:[4,2,5,14,10,8,6,12,16],somepath:5,proxi:[1,2,15,5,10],deleg:[1,20],"09ead8a0":10,sun:4,sum:[2,3,4],abl:[5,10],"_revs_limit":[9,8,10],couch_db_update_notifier_sup:18,delet:[0,2,3,5,14,6,7,8,9,10,12,13,19,15,18,16,20],version:[2,3,5,14,6,7,10,13,19,15,8],"39f56a392b86bbee57e2138921346406":6,compressible_typ:[17,18],"public":5,hasn:3,full:[4,2,5,14,10,6,13,19,15,8],hash:[14,5,10],modular:4,solari:10,ineffici:3,modifi:[10,2,3,14,6],valu:[2,3,4,14,6,7,8,10,13,19,15,18,16,20],"_stat":[2,3,10,9,17,18],search:6,include_doc:[8,10,16,20,6],cfaa66cd:10,amount:[13,15,10],authentication_handl:[17,18],"33b9fbce46930280dab37d672bbc8bb9":19,pick:2,action:[8,10],applewebkit:15,auth_cach:[17,18],via:[3,4,10],asd:13,filenam:[13,15],json2:[4,10],heurist:3,handle_task_status_req:18,select:[10,16,6,19,15,8],regist:4,two:[2,3,5,14,10,12,19,15,8,20],registertyp:[3,4],rest_syntax:11,taken:[8,14,5],chicken:[14,6],minor:10,more:[2,3,5,14,10,8,6,11,12,13,19,15,16],desir:[20,5],mozilla:[15,5],httpd_global_handl:[17,1,18,5],jsonp:[5,10],known:[7,5],compani:19,cach:[2,5,14,10,13,8],grill:6,none:[2,10,7,8,6,13,18,16],endpoint:[20,5,10],dev:[2,10],histori:[1,2,10,13,15,8],remain:[8,3,5],all_or_noth:[8,15,10],prompt:[19,10],share:[1,5,10,11,17,18,16],accept:[2,3,5,14,10,6,13,15,17,18,8],icmp_req:5,"_attach":[13,15],explor:10,dfd2199a:10,revis:[1,3,14,10,7,8,6,12,13,19,15,18,16],cours:[8,20],goal:20,freshli:8,secur:[1,3,4,5,10,15,17,8],rather:[10,8,6],anoth:[2,5,6,15,8,20],"9c65296036141e575d32ba9c034dd3e":[13,8,6],reject:10,iso:15,socket_opt:5,simpl:[3,5,10,6,13,19,15,18],css:[13,4,6],resourc:[1,5,14,10,17,16],referenc:4,minimum:[2,10],compulsori:8,reflect:[10,15,6],plane:10,"64b8":15,stabil:
10,github:11,ac320479:10,confus:[15,10],auth_cache_miss:2,caus:[10,8,14,5,6],callback:10,handle_view_cleanup_req:18,media:[5,10],"_view_cleanup":[17,18,9,8,10],doc_id:[8,2,15,20,10],logrot:10,bbd93f77:10,checkbox:19,help:[1,14,10,9,15,8],totaltim:6,soon:20,handle_restart_req:18,"_design_":6,"_compact":[17,18,9,8],through:[2,3,5,14,6,11,12,19,15],paramet:[3,5,10,8,6,13,19,15,18,16,20],style:[16,13,3,10],"54fd258e":10,get_valu:4,binari:[2,15,14,4,10],pend:[3,10],rapidli:3,delici:[13,8],might:[16,2,3,5,10],recip:[13,2,8,14,6],good:[11,16,3,10],"return":[0,1,2,3,4,14,6,7,8,9,10,12,13,19,15,18,16],timestamp:[15,8,3,20],libwww:8,framework:10,"_replication_state_tim":20,bigger:3,ea37fmjyfm8g8lw:15,rfc6454:5,instruct:10,authent:[0,1,2,3,5,10,9,19,17,20],micro:10,token:[5,10],ceas:2,fulli:5,unicod:[14,10],compatibl:10,csv:4,vhost_global_handl:[17,18],couch_native_process:4,weight:6,fish:[13,8],hard:3,idea:[15,3],procedur:[1,8,12],realli:15,heavi:[14,10],connect:[2,5,10,8,15,16],reduct:[8,3,6],utc_random:[13,18],docid:3,research:20,etag:[13,14,10],print:[15,10],"127cbe3":10,msie:10,qualifi:5,group_level:[8,6],uncommit:8,advanc:14,cooki:[0,15,5,10],reason:[2,3,4,14,10,15,8],base:[0,3,5,10,6,19,8],asf:4,"_intranet":5,put:[10,2,3,5,14,6,7,8,9,13,19,15,18,16,20],basi:[13,8,5],thread:[11,15],omit:[3,10],perman:[2,8,10],heartbeat:[8,2,16,10],"_user":[18,2,3,5,10],assign:[2,15,20,14,10],feed:[1,2,3,10,8,12,16,20],"22carrot":6,major:[19,15,14,10],notifi:5,obviou:[15,14],couch_util:4,number:[1,4,2,3,5,14,6,8,10,11,13,19,15,16,20],list_to_binari:4,"5acb":15,summaris:6,done:[11,5,4,20],illegal_database_nam:10,miss:[2,5,10,7,6,13,15,18,8,20],fanci:15,differ:[2,3,5,14,10,8,6,11,12,13,19,15,18,16,20],stats_aggreg:[17,18],script:[3,20,10],interact:[19,5,10],least:[16,10],checkpoint:[12,10],veget:6,recipetitl:6,statement:[8,3,4],store:[4,3,5,14,6,7,10,12,13,15,8],luckili:5,option:[1,2,3,4,5,6,7,8,10,13,19,15,17,18,16,20],relationship:15,a94cb7e50ded1e06f943be5bfbddf8ca:13,selector:10,part:[2
,5,14,10,9,19],pars:[14,10,11,15,18,16],max_http_pipeline_s:[17,18],grace:10,kind:10,max_concurrent_connect:10,doubli:5,remot:[2,19,10],remov:[8,10],handle_all_dbs_req:18,jqueri:10,reus:5,architect:5,stale:[10,8,6],randomli:10,payload:19,hasownproperti:3,handle_changes_req:18,favicon:[17,18,2,9],expir:[16,5],astut:15,"null":[3,4,10,6,15,8,20],bf1eb135:10,imagin:20,"_revs_diff":[9,8],built:[1,2,3,10,19,8],equival:[2,3,5],self:[2,5],"68a20c89a5e70357c20148f8e82ca331":8,couch:[18,3,20,4,10],also:[2,3,5,14,10,8,6,12,13,19,15,16,20],missed_named_view:10,build:[11,15,10,19,6],tool:[19,3,5],zucchini:6,distribut:[12,10],previou:[3,4,10,6,18,20],salmon:6,e23b9e942c19e9fb10ff1fde2e50e0f5:6,react:20,most:[2,3,5,14,10,8,15,16],plai:15,revs_limit:8,plan:14,b89d37509d39dd712546f9510d4a9271:2,unknown_private_path:10,rfc5789:5,"4b6475da":10,hipe:15,clear:6,cover:11,destruct:2,"_couchj":10,clean:[11,8],latest:[3,10,7,6,11,13,15,8],awesom:13,couch_external_manag:18,cdn:5,session:[0,18,2,15],particularli:2,compact_run:[8,19,15,6],font:[10,6],fine:11,find:[11,14],c9df0cdf4442f993fc5570225b4061a0:2,impact:3,access:[10,4,2,3,5,14,6,7,9,13,19,17,8],pretti:15,agrodolc:6,writer:10,solut:[2,19,14],collop:6,templat:3,factor:[5,10],darwin:15,yml:11,hit:[11,2,10],unus:8,express:[8,15],nativ:[1,10,19,5,6],proces:4,myfilt:15,rest:[11,19],restart:[1,2,4,5,10,12,20],r14b:10,ie7:10,rfc:[14,5],emfil:10,statist:[17,2,14,10],"1f443f471e5929dd7b252417629c102b":8,courgett:6,certif:5,arr:4,set:[1,4,2,3,5,14,6,8,10,12,13,19,15,17,18,16,20],seq:[15,8,3,16],couch_view:18,db_update_notifi:[17,18],see:[2,3,5,14,6,7,8,10,11,12,13,19,15,16,20],bare:13,arg:5,reserv:10,"06f1a8dc":10,someth:[11,8,3,20,10],acfd32d233f07cea4b4f37daaacc0082:13,won:10,autogener:10,experi:6,altern:[5,10,6,11,13,20],signatur:[10,15,6],"64575eef70ab90a2b8d55fc09e00440d":13,popup:10,numer:[2,10,6,15,18,8],javascript:[1,4,3,5,14,6,10,11,12,15,17,18],isol:10,mailbox:[15,3],lowercas:[8,6],aserv:20,proplist:4,"30b3b38cdbd9e3a587de9b8122000cff"
:15,os_daemon:5,miscellan:[10,1,2,9],both:[2,5,10,9,8,20],subtitl:[13,8,14,6],delimit:8,respawn:10,pouchdb:12,cokela:11,context:[1,3,4,10,15,20],collect:[13,2,14,6],let:[3,14,10,11,15,20],corrupt:[14,10],whole:[11,16,14],"_ensure_full_commit":[9,8],load:[13,15,14,4,10],security_:5,dfd39d57:10,simpli:[13,20,5,4,10],point:[4,3,5,14,10,19,15,8],sweet:[5,6],header:[1,4,2,3,5,14,6,7,10,11,13,19,15,17,16],fashion:3,param:16,shutdown:10,suppli:[2,5,14,6,7,13,19,15,18,8],comput:[15,10],not_found:14,"0786321986194c92dd3b57dfbfc741ce":8,hovercraft:14,unsuccess:20,devic:2,"9b2851ed9b6f655cc4eb087808406c60":8,perpetu:2,handle_config_req:18,f726bc4d:10,secret:10,bc0d5aed1e339b1cc1f29578f3220a45:8,imag:[2,3],temporary_view_read:2,holler:15,understand:[15,14],func:4,query_serv:[17,1,18],"_chang":[2,3,10,8,9,6,17,18,16,20],fur:6,look:[3,14,10,11,15,20],meastext:6,"08071a80":10,durat:10,"while":[4,3,5,14,10,6,15,8,20],handle_temp_view_req:18,abov:[2,3,4,5,6,13,19,15,8],error:[1,2,3,5,14,10,15,18,8,20],fun:[4,10],anonym:10,max_dbs_open:[17,18,10],readi:14,readm:[11,10],itself:[13,5],"01afaa4f":10,couchbas:10,minim:13,coffeescript:10,lengthi:10,decod:[11,15],conflict:[2,14,10,8,12,13,16],higher:[13,10],optim:[8,14,10],"_bulk_doc":[9,8,10],alert:16,addeventlisten:16,moment:20,temporari:[2,8,10],user:[0,1,2,3,4,5,6,10,12,13,15,17,8,20],robust:10,"_info":[10,17,18,9,6],recent:[16,8,3,5,10],task:[1,2,3,10,12,19,15],"_list":[10,17,18,9,6],older:10,entri:[2,5,10,6,11,8,20],validatefun:3,o_append:10,pickl:6,commonli:3,propos:3,explan:10,pv19jk:3,xserver:20,from:[4,2,3,5,14,6,7,8,10,12,13,19,15,18,16,20],handle_doc_show_req:18,thorough:10,amp:3,doc_b:20,sidebar:10,revpo:[13,15,10],snappi:10,indexof:[3,4],purge_seq:[8,19,15,6],doc_a:20,shortcut:4,"04cfa4059c48e76e7c054bbe033dd8db":13,appli:[3,10,8,6,17,16,20],input:[15,3,10],subsequ:5,"287a28fa680ae0c7fb4729bf0c6e0cf2":14,bin:[11,18,5],vendor:10,format:[1,4,2,3,5,14,6,7,10,11,13,19,15,18,8],bif:10,aadhrstlmabw8vf08:3,thoma:11,game:15,bit:[13,1
5],success:[18,2,8,5,10],signal:[15,10],resolv:8,eel:14,"boolean":[2,3,4,14,6,7,8,13,15,16],httpd_status_cod:2,often:14,method_not_allow:14,some:[3,5,14,10,11,19,15,18,8],back:[2,3,5,14,10,11,12,19,15,8,20],"18c3":15,unspecifi:3,sampl:[11,13,19,17,18,8],mirror:11,surpris:10,per:[8,2,3,5,10],attac:6,pem:5,retri:10,larg:[8,10,3,6],max_document_s:[17,18,10],reproduc:19,machin:[15,5,10],plxzyr5jnc1bah4gt:3,run:[4,2,3,5,14,6,10,19,15,18,8,20],reach:[12,8,6],"_uuid":[17,18,2,19,9],revs_info:[7,13,6],step:[11,5,10],stew:[13,8],bff6edf3ca2474a243023f2dad432a5a:6,recorded_seq:[2,15],vand1c8og4vrdoqd8ywgpdydxrgksm5rwu0nqvbjumg:3,idx:3,constraint:5,database_read:2,require_valid_us:[17,18],idl:10,disclosur:10,dialog:10,multipart_form:4,doc_write_failur:[2,15],"168a663b":15,"05361cc6aa42033878acc1bacb1f39c2":8,block:[11,14,5,10],repair:10,within:[10,2,3,5,14,6,9,13,19,17,18,8],ellipsi:19,ensur:[3,5,14,10,6,12,8,20],chang:[1,2,3,5,14,6,8,10,11,12,19,15,16,20],bserver:20,span:10,spam:3,question:[19,3,6],fast:10,custom:[4,3,5,14,6,15,20],includ:[4,2,3,5,14,6,7,8,10,11,13,19,15,16],suit:[19,10],forward:[9,5,10],since_seq:[15,10],url_encoded_form:4,properli:10,repeatedli:5,uri_fil:[17,18],unexpectedli:10,is_admin:4,translat:5,newer:10,atom:[8,5,4,10],mycert:5,mitig:6,info:[1,2,4,10,11,13,15,18,20],coriand:14,"2b4ab67a":10,consist:[19,8],handle_oauth_req:18,caller:[3,10],"5da40eef":10,"41667642f7":10,highlight:10,similar:[8,3,5],parser:[15,10],doesn:[15,20,10],repres:[3,5,14,9,12,15,16,20],"char":15,incomplet:5,guarante:8,couch_httpd_stats_handl:18,curl:[1,5,14,10,19,20],gecko:15,acceptor_pool_s:5,idempot:10,withcredenti:5,database_dir:[17,18],lemon:6,titl:[11,13,8,14,6],sequenti:[13,16],invalid:[2,3,14,10,7,13,8],number10:5,declar:10,preflight:5,tart:6,fc65594ee76087a3b8c726caf5b40687:6,enhanc:10,svn:10,algorithm:[2,5,14,10,12,13,15,17,18],depth:10,far:11,fresh:[8,13,16],hello:[3,4],"921a12f74df0c1052b3e562a23cd227f":5,code:[1,2,3,4,14,6,7,10,11,13,15,18,8],partial:10,edg:10,"2c69":1
5,scratch:[8,10],mayonnais:6,eventsourc:[2,16,10],descropt:2,secure_rewrit:[17,18],compact:[2,14,10,6,19,15,8],privat:[8,5],sensit:6,peanut:6,base64:[15,13,3],friendli:10,send:[1,2,3,4,14,6,8,10,11,12,13,19,17,16],granular:10,util_driver_dir:[17,18],sens:13,sent:[3,4,5,10,8,16],"_active_task":[2,10,9,12,17,18,8,20],rollback:10,"2a03":15,"3184f9d1ea934e1f81a24c71bde5c168":15,reduce_overflow:10,bulksav:10,implicitli:[15,4,10],privkei:5,dbname:[2,5],tri:10,"_all_doc":[7,9,8,10],dname:3,button:[11,19,10],"try":[11,8,15,5],cooktim:[14,6],race:10,stddev:2,inclusive_end:[10,8,6],rfc2817:5,pleas:[1,5,10],impli:[20,14],smaller:3,natur:[8,14],focu:10,video:14,download:14,b3eb5ac6fbaef4428d712e66483dcb79:8,click:[11,19,10],append:[13,5],compat:[15,10],index:[4,2,3,5,14,6,10,19,15,8],"98515bf0b9":10,undetect:16,c6252d6d7f:10,compar:[12,8,6],cell:19,experiment:[16,5,10],priv:[18,10],leg:10,couch_auth_cach:18,bodi:[3,14,10,19,15,18,8],logout:0,becom:10,sinc:[3,5,10,8,6,11,13,19,15,16,20],end_kei:10,great:8,copyright:15,metro:10,larger:[14,10],converg:3,cert:5,typic:[3,14,5],epilogu:10,honor:[16,14,10],"04cfa71d377aef_i_am_in_yer_couch":13,win:[16,15,10],app:10,"_updat":[17,18,9,6],couchj:[18,10],api:[1,5,14,10,9,6,12,13,19,18,8],wip:4,feb:20,commun:[14,5,10],doubl:[19,15,20,14,10],upgrad:[5,10],urlprefix:10,next:[3,4,5,10,11,19],websit:5,few:15,usr:[18,5],save:[19,8,10],simpler:[13,10],burst:10,start:[10,4,2,3,5,14,6,8,9,11,13,19,15,18,16,20],inet:[5,10],remaind:[8,5,6],sort:[15,10,3,6],"2719fd41187c60762ff584761b714cfb":13,central:2,topic:11,account:[11,5],alik:14,retriev:[4,5,14,10,6,13,18,8],salad:[13,6],scalabl:10,alia:4,alic:15,tag:[15,8,3],obvious:[15,3,16],meet:8,fetch:[10,6],aliv:[16,15],control:[1,2,5,14,10,12,13,17,8],a3544d296de19e6f5b932ea77d886942:8,max_connect:[17,18],process:[4,2,3,5,14,6,10,12,19,15,17,8],lock:10,sudo:10,high:10,carrot:6,tab:10,serial:[15,10],"_db_event":2,lamb:8,ilpzsdxfpchdtdhxeca76aqh:3,gcc:15,sit:5,"_design":[3,4,5,10,9,6,15,17,18,8],occur:[2,
10,8,6,19,15,16],brian:8,crlf:8,instead:[8,10,3,14,6],"_replic":[2,10,9,19,17,18,20],circular:10,delai:[10,6],"6ec875e15e6b385120938df18ee8e496":2,overridden:5,watch:8,"5ab712a2":10,tier:6,redund:10,physic:[8,15],drop:10,bind:[5,10],correspond:[2,14,10,6,13,19,15,18],open_os_fil:2,element:[19,8,10,3,6],issu:[10,16,4,6],auth_cache_hit:2,allow:[4,2,3,5,14,6,7,8,10,11,13,19,17,16],jira:5,allow_jsonp:[17,18],move:[12,2,10],optgroup:10,comma:[17,5],server_opt:5,lunchbox:6,"04cfa405fce10b0df4c08f95e667cd2f":13,cookie_authentication_handl:18,chosen:[19,15],whether:[2,6,13,19,15,8],anyon:15,therefor:[19,8,20,14,6],sourcelisten:16,view_read:2,crash:[12,8,5,10],pure:[13,5],handl:[1,3,5,14,10,8,11,15,16],auto:10,spell:10,dai:5,auth:[3,10],mention:20,httpd_request_method:2,source_last_seq:[2,15],handle_uuids_req:18,fingerprint:6,bmveuaaad:3,couch_httpd_rewrit:18,longpol:[8,2,16],anyth:[5,10,9,6,15,8],edit:[19,3,5,4,10],cider:6,couch_httpd_db:18,userctx:[15,3,4],mode:[3,10,16,13,15,8],subset:[20,5],bignum:10,jiffi:15,bump:10,chunk:[3,4,10],handle_design_req:18,meta:[2,8],"static":[13,10,14,5,6],our:[11,15,3],special:[3,4,14,10,8,9,6,19,16,20],out:[2,5,10],variabl:[5,4,10],matrix:5,cleanli:10,req:[3,5,4],rev:[3,14,10,7,8,6,13,19,15,16],stub:[13,15,10],another_replicator_db:20,rel:4,inaccess:10,hardwar:10,dhcp:2,enable_cor:[17,5],ref:10,math:3,common:[15,3,5],end_key_doc_id:10,insid:[11,2,3,14,5],updatefun:3,standalon:10,tempt:15,releas:[1,3,10],shortest:15,complianc:10,"4a01":15,could:[2,3,14,10,8,6,13,15,16,20],timer:10,keep:[2,3,14,10,8,15,16],length:[2,3,5,14,6,13,15,16],cname:5,organis:11,outsid:10,retain:10,timezon:10,localdomain:5,fieldnam:19,suffix:5,bulk_request:2,distcheck:10,allow_persistent_cooki:10,ddocnam:5,lib:[18,4,10],owner:10,journei:3,forgeri:10,"long":[1,2,10,16,19,8],dump:15,strict:14,unknown:10,licens:[11,15,10],mkdir:5,system:[3,5,14,10,19,15,8,20],messag:[4,2,3,5,14,10,8,13,15,16],attach:[1,5,14,10,9,6,13,15,17,18,8],attack:10,termin:11,my_rep_2:20,showfun:
3,shell:[19,14,5,10],travi:11,"_replication_id":20,accompani:8,rst:11,exactli:[15,20,5],rss:4,structur:[10,1,2,3,4,14,6,9,11,13,19,15,18,8],charact:[2,3,14,10,9,19,15,8],"52c2370f5027043d286daca4de247db0":2,histor:8,start_tim:[2,15],plaintext:5,by_ingredi:6,linker:10,deprec:10,f7114d4d81124b223283f3e89eee043:6,have:[2,3,5,14,6,7,8,10,11,19,15,16,20],close:[2,10,8,11,12,15,16],max_attachment_chunk_s:[17,18],turn:[11,5],gluten:6,handle_proxy_req:5,min:[2,3],sandbox:4,mix:15,builtin:10,user_ctx:20,"_rewrit":[17,18,9,5,6],datacent:12,singl:[2,3,4,14,10,8,6,19,15,18,16,20],unless:13,l368:4,clash:10,deploy:10,discov:[12,16,5],awk:10,segment:10,why:[15,3],"43ecbd256a70a3a2f7de40d2374b6c3002918834":5,irishfishstew:13,databasea:2,databaseb:2,ocassion:10,url:[0,2,3,5,14,6,7,9,10,11,13,19,15,18,8],"_show":[10,9,6,15,17,18],request:[10,1,2,3,5,14,6,7,8,9,11,13,19,15,18,16],uri:[18,5,10],casserol:6,snapshot:[2,10],determin:[2,10,9,6,11,12,8],occasion:[8,10],"_id":[3,4,6,13,19,15,8,20],fact:[3,20],text:[2,3,4,14,10,6,11,13,15,18],redirect:[5,4,10],bring:[12,10],"7f4a3e05e0cbc6f48a0035e3508eef90":5,nagl:10,"5e55":15,textual:15,locat:[1,2,5,4,10],strtod:15,use_users_db:10,stringifi:[11,15,4],should:[4,2,3,5,14,6,10,11,19,15,18,8],jan:[14,5,20],restructur:11,suppos:20,local:[1,2,4,5,10,7,9,12,19,20],contribut:[11,1,15],notat:14,convert:[5,10],sysv:10,new_edit:10,autom:6,regularli:5,beam:10,suet:8,increas:[13,10,6],tbc:[0,6],tbd:2,user_context:4,enabl:[4,5,14,10,8,12,13,19,17,16],organ:18,missing_check:[2,15],sha:15,integr:[14,5,10],contain:[4,2,3,5,14,6,10,13,15,18,8,20],secobj:[15,3,4],view:[10,1,2,3,4,14,6,7,8,9,11,19,15,17,16,20],frame:4,wdbl:3,packet:10,temporarili:[5,10],multipart:[4,10],closer:12,statu:[1,2,3,14,10,6,12,13,19,15,8],correctli:[3,14,10,6,13,19,8],pattern:[15,5],dll:10,state:[3,14,11,15,8,20],entrant:5,progress:[1,10],neither:5,email:11,bought:5,kei:[10,4,2,3,5,14,6,9,15,17,18,8],remotehost:19,"_purg":[9,8],job:16,entir:[2,3,6,19,18,8],joe:20,exclam:19,mapfun:3,r
egular:[8,20],addit:[4,3,5,14,10,6,11,13,17,8],extens:3,admin:[2,3,5,10,7,6,13,15,18,8,20],etc:[11,3,5,4,10],instanc:[10,2,5,14,6,7,9,12,19,15,18,8],html5:2,revert:10,authsess:15,rfc2616:5,nodelai:[17,5],respect:[8,10],"1f443f471e5929dd7b252417625ed170":8,"20dlqltj98pdxwmp":3,lambstew:8,yaml:4,addition:12,decent:15,compos:8,compon:[9,19,5],cm9vddo1mdzbrjqzrjrfcuikzprfan:15,treat:[8,14,10],utc_id_suffix:13,mailmap:11,immedi:[8,13,16,20,10],mike:15,cff9e881516483911aa2f0e98949092d:2,f755c413badf66b22941313f9f00332c:13,inbound:5,startkey_docid:[8,6],handle_rewrite_req:18,decim:15,endkei:[10,8,6],d9566c831d:10,behalf:20,"3fab6bb5":10,otp:[13,14,5,10],last:[2,3,5,10,8,6,15,18,16],present:[3,5,14,19,16,20],replic:[1,2,3,10,7,9,12,13,19,15,17,18,8,20],multi:[10,6],acinclud:11,plain:[13,2,14,4,6],defin:[2,3,4,14,10,8,6,15,16,20],a84b2cf:11,cve:10,observ:15,f065cee7c3fd93aa50f6c97acde93030:15,layer:17,helper:4,"8843faaf0b831d364278331bc3001bd8":19,almost:15,demo:19,site:[2,5,4,10],"34e2":15,"093d2aa6":10,prose:11,cdfdda2314:10,tojson:[3,4],mvcc:10,greater:8,welcom:[18,2,19,5,10],parti:[5,6],cross:[17,1,16,5,10],member:[15,5,10],python:[15,14],probabl:[16,3,5],infer:5,overzeal:10,difficult:15,incorrect:[10,7,6,13,18,8],tighten:5,overal:[17,5,10],http:[1,4,2,3,5,14,6,7,8,10,11,13,19,15,17,18,16,20],"67c1bb92010e7abe35d7d629635f18b6":20,keepal:10,effect:[14,6,16,9,8,20],query_server_config:[17,1,18,3],oauth_authentication_handl:18,initi:[12,19,3,4,10],handle_replicate_req:18,"_revs_info":[13,15],decoupl:10,respons:[1,4,2,3,5,14,6,7,8,10,13,15,18,16],expand:10,audit:10,off:5,well:[11,15,4,10],couch_httpd_proxi:5,ivborw0kggoaaaansuheugaaabaaaaaqcamaaaaolq9taaaasv:3,exampl:[10,4,2,3,5,14,6,9,12,13,19,17,18,8,20],command:[3,5,14,10,6,13,19],filesystem:4,undefin:[15,10],audio:14,usual:[11,14],lseek:10,"6d3c30241ba0aaa4e16c6ea99224f915687ed8cd":5,paus:[16,10],less:[14,5,10],obtain:[0,2,6,7,8,13,19,18,16],tcp:[5,10],end_tim:[2,15],tcl:15,heavili:10,web:[1,4,5,14,10,19],"3c24a94d":10,w
ed:2,cert_fil:5,makefil:[11,10],spinner:10,add:[5,10,11,13,19,8,20],cleanup:10,fishstew:[13,2,8,6],introduct:[1,19],match:[3,5,14,10,7,6,13,8,20],gmt:[4,2,5,14,13,20],branch:[1,10],howto:5,realiz:15,know:[8,15],tick:19,recurs:[5,10],insert:[10,7,6,11,13,8],like:[1,4,2,3,5,14,10,11,15,8,20],onion:6,sofa:5,opera:10,necessari:[19,10],lose:[15,10],async:[15,10],architectur:15,page:[11,19,3,5,10],backlog:5,shed:10,didn:20,password_sha:3,linux:[19,10],"6d912c9f":10,aduki:6,"export":4,proper:10,home:5,librari:[15,19,3,14,10],zmxw:3,trust:5,leaf:16,lead:[3,4,10],leak:10,avoid:[16,3,5,10],octet:14,thank:11,overlap:[15,10],uw8brwmp1h4zxsld6yghuq0f6g4xyqkxvca36mdh6:3,outgo:3,leav:11,mcdonald:10,trap:10,usag:[5,4,10],"869f42e2":10,vhost:[17,1,5,10],host:[1,2,5,14,10,19,15,17],edited_bi:3,although:[15,5],offset:[10,8,2,15,6],after:[2,3,4,5,10,8,6,11,12,19,16,20],panel:19,about:[4,2,3,5,14,6,10,11,13,15,8,20],actual:[15,8,2,3,16],socket:[17,1,2,5,10],column:10,badusernamechar:3,"4oleijlheutv2x6rbno1uqj9g0rmcuqo0vbig4vmfeopcwiwmdow82fzx":3,disabl:[3,4,10],own:[3,20,14,10],c0ebe9256695ff083347cbf95f93e280:20,automat:[10,6,13,19,15,8],due:[15,5,4,10],guard:10,empti:[3,4,10,8,6,15,18,16,20],d953b18035b76f2a5b1d1d93f25d3aea:13,mere:15,merg:10,w3c:[16,5],transfer:12,by_titl:6,appl:[15,6],"var":[3,4,5,11,15,18,16],f755c413badf66b22941313f9f0024ca:13,"function":[1,4,3,5,14,6,8,10,12,19,15,17,18,16,20],interest:[15,3],"_util":[17,18,2,3,9],docs_read:[2,15],spuriou:10,oauth:10,ear:15,bug:[11,10],"031aad7b469956cf2826fcb2a9260492":2,count:[2,3,10,8,19,15,16],succe:3,made:[2,5,14,10,8,15,16],removeeventlisten:16,wish:8,displai:[10,19,5,6],asynchron:10,record:[2,3,10,8,6,13,15,16],below:[0,4,2,3,5,14,6,7,9,13,19,18,8],limit:[3,5,10,7,8,6,16],indefinit:16,cake:6,otherwis:[8,15,4],problem:[8,5,10],irish:13,evalu:[12,14],mask:10,dure:[3,4,5,10,12,8,20],pid:[2,15,10],pie:6,implement:[4,3,5,14,10,12,15,20],ini:[20,16,5,4,10],ing:20,f0d6f19bc8:10,inc:19,mutual:17,acloc:10,detail:[1,2,5,10,7,6,13,19
,15,16,20],virtual:[1,5,10],"_replication_":20,futur:11,rememb:[11,19,3,5,10],varieti:14,stat:[17,1,18,10],repeat:10,"_auth":3,df8eca9da37dade42ee4d7aa3401f1dd:3,accomplish:[15,20],stai:16,expando:10,sphinx:[11,10],basic_respond:5,reliabl:10,c2e0085a21d34fa1cecb6dc26a4ae657:3,rule:[9,3,5,10],icu_driv:10,portion:6,"7e3c69ba":10},objtypes:{"0":"js:function","1":"js:data"},titles:["9.7. Authentication Methods","Introduction","9.5. Miscellaneous Methods","6. Design Docs","7. Query Servers","3. Configuration","9.4. Design Document Methods","9.3. Local (non-replicating) Document Methods","9.1. Database Methods","9. API Reference","13. Release History","12. Contributing to this Documentation","4. Replication","9.2. Document Methods","2. API Basics","10. JSON Structure Reference","8. Changes Feed","11. Configuration Reference","9.6. Configuration Methods","1. Introduction","5. Replicator Database"],objnames:{"0":["js","function","JavaScript function"],"1":["js","data","JavaScript data"]},filenames:["api/authn","index","api/misc","ddocs","query-servers","configuring","api/design","api/local","api/database","api/reference","changelog","contributing","replication","api/documents","api-basics","json-structure","changes","config_reference","api/configuration","intro","replicator"]}) \ No newline at end of file diff -Nru couchdb-1.2.0/share/doc/build/html/_sources/api/authn.txt couchdb-1.4.0~rc.1/share/doc/build/html/_sources/api/authn.txt --- couchdb-1.2.0/share/doc/build/html/_sources/api/authn.txt 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/_sources/api/authn.txt 2013-08-23 10:57:21.000000000 -0400 @@ -0,0 +1,41 @@ +.. Licensed under the Apache License, Version 2.0 (the "License"); you may not +.. use this file except in compliance with the License. You may obtain a copy of +.. the License at +.. +.. http://www.apache.org/licenses/LICENSE-2.0 +.. +.. Unless required by applicable law or agreed to in writing, software +.. 
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +.. WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +.. License for the specific language governing permissions and limitations under +.. the License. + +====================== +Authentication Methods +====================== + +.. todo:: Authentication Methods + +The CouchDB Authentication methods provide an interface for obtaining +session and authorization data. + +A list of the available methods and URL paths are provided below: + ++--------+-------------------------+-------------------------------------------+ +| Method | Path | Description | ++========+=========================+===========================================+ +| GET | /_oauth/access_token | TBC | ++--------+-------------------------+-------------------------------------------+ +| GET | /_oauth/authorize | TBC | ++--------+-------------------------+-------------------------------------------+ +| POST | /_oauth/authorize | TBC | ++--------+-------------------------+-------------------------------------------+ +| GET | /_oauth/request_token | TBC | ++--------+-------------------------+-------------------------------------------+ +| GET | /_session | Returns cookie based login user | +| | | information | ++--------+-------------------------+-------------------------------------------+ +| POST | /_session | Do cookie based user login | ++--------+-------------------------+-------------------------------------------+ +| DELETE | /_session | Logout cookie based user | ++--------+-------------------------+-------------------------------------------+ diff -Nru couchdb-1.2.0/share/doc/build/html/_sources/api/configuration.txt couchdb-1.4.0~rc.1/share/doc/build/html/_sources/api/configuration.txt --- couchdb-1.2.0/share/doc/build/html/_sources/api/configuration.txt 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/_sources/api/configuration.txt 2013-08-23 10:57:21.000000000 -0400 
@@ -0,0 +1,297 @@ +.. Licensed under the Apache License, Version 2.0 (the "License"); you may not +.. use this file except in compliance with the License. You may obtain a copy of +.. the License at +.. +.. http://www.apache.org/licenses/LICENSE-2.0 +.. +.. Unless required by applicable law or agreed to in writing, software +.. distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +.. WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +.. License for the specific language governing permissions and limitations under +.. the License. + +.. _api-config: + +===================== +Configuration Methods +===================== + +The CouchDB API Server Configuration Methods provide an interface to +query and update the various configuration values within a running +CouchDB instance. + +A list of the available methods and URL paths are provided below: + ++--------+-------------------------+-------------------------------------------+ +| Method | Path | Description | ++========+=========================+===========================================+ +| GET | /_config | Obtain a list of the entire server | +| | | configuration | ++--------+-------------------------+-------------------------------------------+ +| GET | /_config/section | Get all the configuration values for the | +| | | specified section | ++--------+-------------------------+-------------------------------------------+ +| GET | /_config/section/key | Get a specific section/configuration value| ++--------+-------------------------+-------------------------------------------+ +| PUT | /_config/section/key | Set the specified configuration value | ++--------+-------------------------+-------------------------------------------+ +| DELETE | /_config/section/key | Delete the current setting | ++--------+-------------------------+-------------------------------------------+ + +``GET /_config`` +================ + +* **Method**: ``GET /_config`` +* **Request**: None +* 
**Response**: Returns a structure configuration name and value pairs, + organized by section +* **Admin Privileges Required**: yes +* **Return Codes**: + + * **200**: + Request completed successfully. + +Returns the entire CouchDB server configuration as a JSON structure. The +structure is organized by different configuration sections, with +individual values. + +For example, to get the configuration for a server: + +.. code-block:: http + + GET http://couchdb:5984/_config + Accept: application/json + +The response is the JSON structure: + +.. code-block:: javascript + + { + "query_server_config" : { + "reduce_limit" : "true" + }, + "couchdb" : { + "os_process_timeout" : "5000", + "max_attachment_chunk_size" : "4294967296", + "max_document_size" : "4294967296", + "uri_file" : "/var/lib/couchdb/couch.uri", + "max_dbs_open" : "100", + "view_index_dir" : "/var/lib/couchdb", + "util_driver_dir" : "/usr/lib64/couchdb/erlang/lib/couch-1.0.1/priv/lib", + "database_dir" : "/var/lib/couchdb", + "delayed_commits" : "true" + }, + "attachments" : { + "compressible_types" : "text/*, application/javascript, application/json, application/xml", + "compression_level" : "8" + }, + "uuids" : { + "algorithm" : "utc_random" + }, + "daemons" : { + "view_manager" : "{couch_view, start_link, []}", + "auth_cache" : "{couch_auth_cache, start_link, []}", + "uuids" : "{couch_uuids, start, []}", + "stats_aggregator" : "{couch_stats_aggregator, start, []}", + "query_servers" : "{couch_query_servers, start_link, []}", + "httpd" : "{couch_httpd, start_link, []}", + "stats_collector" : "{couch_stats_collector, start, []}", + "db_update_notifier" : "{couch_db_update_notifier_sup, start_link, []}", + "external_manager" : "{couch_external_manager, start_link, []}" + }, + "stats" : { + "samples" : "[0, 60, 300, 900]", + "rate" : "1000" + }, + "httpd" : { + "vhost_global_handlers" : "_utils, _uuids, _session, _oauth, _users", + "secure_rewrites" : "true", + "authentication_handlers" : 
"{couch_httpd_oauth, oauth_authentication_handler}, + {couch_httpd_auth, cookie_authentication_handler}, + {couch_httpd_auth, default_authentication_handler}", + "port" : "5984", + "default_handler" : "{couch_httpd_db, handle_request}", + "allow_jsonp" : "false", + "bind_address" : "192.168.0.2", + "max_connections" : "2048" + }, + "query_servers" : { + "javascript" : "/usr/bin/couchjs /usr/share/couchdb/server/main.js" + }, + "couch_httpd_auth" : { + "authentication_db" : "_users", + "require_valid_user" : "false", + "authentication_redirect" : "/_utils/session.html", + "timeout" : "600", + "auth_cache_size" : "50" + }, + "httpd_db_handlers" : { + "_design" : "{couch_httpd_db, handle_design_req}", + "_compact" : "{couch_httpd_db, handle_compact_req}", + "_view_cleanup" : "{couch_httpd_db, handle_view_cleanup_req}", + "_temp_view" : "{couch_httpd_view, handle_temp_view_req}", + "_changes" : "{couch_httpd_db, handle_changes_req}" + }, + "replicator" : { + "max_http_sessions" : "10", + "max_http_pipeline_size" : "10" + }, + "log" : { + "include_sasl" : "true", + "level" : "info", + "file" : "/var/log/couchdb/couch.log" + }, + "httpd_design_handlers" : { + "_update" : "{couch_httpd_show, handle_doc_update_req}", + "_show" : "{couch_httpd_show, handle_doc_show_req}", + "_info" : "{couch_httpd_db, handle_design_info_req}", + "_list" : "{couch_httpd_show, handle_view_list_req}", + "_view" : "{couch_httpd_view, handle_view_req}", + "_rewrite" : "{couch_httpd_rewrite, handle_rewrite_req}" + }, + "httpd_global_handlers" : { + "_replicate" : "{couch_httpd_misc_handlers, handle_replicate_req}", + "/" : "{couch_httpd_misc_handlers, handle_welcome_req, <<\"Welcome\">>}", + "_config" : "{couch_httpd_misc_handlers, handle_config_req}", + "_utils" : "{couch_httpd_misc_handlers, handle_utils_dir_req, \"/usr/share/couchdb/www\"}", + "_active_tasks" : "{couch_httpd_misc_handlers, handle_task_status_req}", + "_session" : "{couch_httpd_auth, handle_session_req}", + "_log" : 
"{couch_httpd_misc_handlers, handle_log_req}", + "favicon.ico" : "{couch_httpd_misc_handlers, handle_favicon_req, \"/usr/share/couchdb/www\"}", + "_all_dbs" : "{couch_httpd_misc_handlers, handle_all_dbs_req}", + "_oauth" : "{couch_httpd_oauth, handle_oauth_req}", + "_restart" : "{couch_httpd_misc_handlers, handle_restart_req}", + "_uuids" : "{couch_httpd_misc_handlers, handle_uuids_req}", + "_stats" : "{couch_httpd_stats_handlers, handle_stats_req}" + } + } + + +``GET /_config/section`` +======================== + +* **Method**: ``GET /_config/section`` +* **Request**: None +* **Response**: All the configuration values within a specified section +* **Admin Privileges Required**: yes +* **Return Codes**: + + * **200**: + Request completed successfully. + +Gets the configuration structure for a single section. For example, to +retrieve the CouchDB configuration section values: + +.. code-block:: http + + GET http://couchdb:5984/_config/couchdb + Accept: application/json + +The returned JSON contains just the configuration values for this +section: + +.. code-block:: javascript + + { + "os_process_timeout" : "5000", + "max_attachment_chunk_size" : "4294967296", + "max_document_size" : "4294967296", + "uri_file" : "/var/lib/couchdb/couch.uri", + "max_dbs_open" : "100", + "view_index_dir" : "/var/lib/couchdb", + "util_driver_dir" : "/usr/lib64/couchdb/erlang/lib/couch-1.0.1/priv/lib", + "database_dir" : "/var/lib/couchdb", + "delayed_commits" : "true" + } + +``GET /_config/section/key`` +============================ + +* **Method**: ``GET /_config/section/key`` +* **Request**: None +* **Response**: Value of the specified key/section +* **Admin Privileges Required**: yes +* **Return Codes**: + + * **200**: + Request completed successfully. + +Gets a single configuration value from within a specific configuration +section. For example, to obtain the current log level: + +.. 
code-block:: http + + GET http://couchdb:5984/_config/log/level + Accept: application/json + +Returns the string of the log level: + +.. code-block:: javascript + + "info" + +.. note:: + The returned value will be the JSON of the value, which may be a + string or numeric value, or an array or object. Some client + environments may not parse simple strings or numeric values as valid JSON. + +.. _api-put-config: + +``PUT /_config/section/key`` +============================ + +* **Method**: ``PUT /_config/section/key`` +* **Request**: Value structure +* **Response**: Previous value +* **Admin Privileges Required**: yes +* **Return Codes**: + + * **200**: + Configuration option updated successfully + + * **500**: + Error setting configuration + +Updates a configuration value. The new value should be supplied in the +request body in the corresponding JSON format. For example, if you are +setting a string value, you must supply a valid JSON string. + +For example, to set the function used to generate UUIDs by the +``GET /_uuids`` API call to use the ``utc_random`` generator: + +.. code-block:: http + + PUT http://couchdb:5984/_config/uuids/algorithm + Content-Type: application/json + + "utc_random" + +The return value will be empty, with the response code indicating the +success or failure of the configuration setting. + +``DELETE /_config/section/key`` +=============================== + +* **Method**: ``DELETE /_config/section/key`` +* **Request**: None +* **Response**: Previous value +* **Admin Privileges Required**: yes +* **Return Codes**: + + * **409**: + Supplied revision is incorrect or missing + +Deletes a configuration value. The returned JSON will be the value of +the configuration parameter before it was deleted. For example, to +delete the UUID parameter: + +.. code-block:: http + + DELETE http://couchdb:5984/_config/uuids/algorithm + Content-Type: application/json + +The returned value is the last configured UUID function: + +.. 
code-block:: javascript + + "random" diff -Nru couchdb-1.2.0/share/doc/build/html/_sources/api/database.txt couchdb-1.4.0~rc.1/share/doc/build/html/_sources/api/database.txt --- couchdb-1.2.0/share/doc/build/html/_sources/api/database.txt 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/_sources/api/database.txt 2013-08-23 10:57:21.000000000 -0400 @@ -0,0 +1,1471 @@ +.. Licensed under the Apache License, Version 2.0 (the "License"); you may not +.. use this file except in compliance with the License. You may obtain a copy of +.. the License at +.. +.. http://www.apache.org/licenses/LICENSE-2.0 +.. +.. Unless required by applicable law or agreed to in writing, software +.. distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +.. WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +.. License for the specific language governing permissions and limitations under +.. the License. + +.. _api-db: + +================ +Database Methods +================ + +The Database methods provide an interface to an entire database withing +CouchDB. These are database, rather than document, level requests. 
+ +A list of the available methods and URL paths are provided below: + ++--------+-------------------------+-------------------------------------------+ +| Method | Path | Description | ++========+=========================+===========================================+ +| GET | /db | Returns database information | ++--------+-------------------------+-------------------------------------------+ +| PUT | /db | Create a new database | ++--------+-------------------------+-------------------------------------------+ +| DELETE | /db | Delete an existing database | ++--------+-------------------------+-------------------------------------------+ +| GET | /db/_all_docs | Returns a built-in view of all documents | +| | | in this database | ++--------+-------------------------+-------------------------------------------+ +| POST | /db/_all_docs | Returns certain rows from the built-in | +| | | view of all documents | ++--------+-------------------------+-------------------------------------------+ +| POST | /db/_bulk_docs | Insert multiple documents in to the | +| | | database in a single request | ++--------+-------------------------+-------------------------------------------+ +| GET | /db/_changes | Returns changes for the given database | ++--------+-------------------------+-------------------------------------------+ +| POST | /db/_compact | Starts a compaction for the database | ++--------+-------------------------+-------------------------------------------+ +| POST | /db/_compact/design-doc | Starts a compaction for all the views in | +| | | the selected design document | ++--------+-------------------------+-------------------------------------------+ +| POST | /db/_ensure_full_commit | Makes sure all uncommitted changes are | +| | | written and synchronized to the disk | ++--------+-------------------------+-------------------------------------------+ +| POST | /db/_missing_revs | Given a list of document revisions, | +| | | returns the document revisions that do 
not| +| | | exist in the database | ++--------+-------------------------+-------------------------------------------+ +| POST | /db/_purge | Purge some historical documents entirely | +| | | from database history | ++--------+-------------------------+-------------------------------------------+ +| POST | /db/_revs_diff | Given a list of document revisions, | +| | | returns differences between the given | +| | | revisions and ones that are in the | +| | | database | ++--------+-------------------------+-------------------------------------------+ +| GET | /db/_revs_limit | Gets the limit of historical revisions to | +| | | store for a single document in the | +| | | database | ++--------+-------------------------+-------------------------------------------+ +| PUT | /db/_revs_limit | Sets the limit of historical revisions to | +| | | store for a single document in the | +| | | database | ++--------+-------------------------+-------------------------------------------+ +| GET | /db/_security | Returns the special security object for | +| | | the database | ++--------+-------------------------+-------------------------------------------+ +| PUT | /db/_security | Sets the special security object for the | +| | | database | ++--------+-------------------------+-------------------------------------------+ +| POST | /db/_temp_view | Execute a given view function for all | +| | | documents and return the result | ++--------+-------------------------+-------------------------------------------+ +| POST | /db/_view_cleanup | Removes view files that are not used by | +| | | any design document | ++--------+-------------------------+-------------------------------------------+ + +For all the database methods, the database name within the URL path +should be the database name that you wish to perform the operation on. +For example, to obtain the meta information for the database +``recipes``, you would use the HTTP request: + +.. 
code-block:: http + + GET /recipes + +For clarity, the form below is used in the URL paths: + +.. code-block:: http + + GET /db + +Where ``db`` is the name of any database. + +.. _api-get-db: + +``GET /db`` +=========== + +* **Method**: ``GET /db`` +* **Request**: None +* **Response**: Information about the database in JSON format +* **Admin Privileges Required**: no +* **Return Codes**: + + * **404**: + The requested content could not be found. The returned content will include + further information, as a JSON object, if available. + +Gets information about the specified database. For example, to retrieve +the information for the database ``recipe``: + +.. code-block:: http + + GET http://couchdb:5984/recipes + Accept: application/json + +The JSON response contains meta information about the database. A sample +of the JSON returned for an empty database is provided below: + +.. code-block:: javascript + + { + "compact_running" : false, + "committed_update_seq" : 375048, + "disk_format_version" : 5, + "disk_size" : 33153123, + "doc_count" : 18386, + "doc_del_count" : 0, + "db_name" : "recipes", + "instance_start_time" : "1290700340925570", + "purge_seq" : 10, + "update_seq" : 375048 + } + + +The elements of the returned structure are shown in the table below: + ++----------------------------------+-------------------------------------------+ +| Field | Description | ++==================================+===========================================+ +| committed_update_seq | The number of committed update. | ++----------------------------------+-------------------------------------------+ +| compact_running | Set to true if the database compaction | +| | routine is operating on this database. | ++----------------------------------+-------------------------------------------+ +| db_name | The name of the database. 
| ++----------------------------------+-------------------------------------------+ +| disk_format_version | The version of the physical format used | +| | for the data when it is stored on disk. | ++----------------------------------+-------------------------------------------+ +| disk_size | Size in bytes of the data as stored on the| +| | disk. Views indexes are not included in | +| | the calculation. | ++----------------------------------+-------------------------------------------+ +| doc_count | A count of the documents in the specified | +| | database. | ++----------------------------------+-------------------------------------------+ +| doc_del_count | Number of deleted documents | ++----------------------------------+-------------------------------------------+ +| instance_start_time | Timestamp of when the database was | +| | opened, expressed in microseconds since | +| | the epoch. | ++----------------------------------+-------------------------------------------+ +| purge_seq | The number of purge operations on the | +| | database. | ++----------------------------------+-------------------------------------------+ +| update_seq | The current number of updates to the | +| | database. | ++----------------------------------+-------------------------------------------+ + +``PUT /db`` +=========== + +* **Method**: ``PUT /db`` +* **Request**: None +* **Response**: JSON success statement +* **Admin Privileges Required**: no +* **Return Codes**: + + * **400**: + Invalid database name + + * **412**: + Database already exists + +Creates a new database. The database name must be composed of one or +more of the following characters: + +- Lowercase characters (``a-z``) + +- Name must begin with a lowercase letter + +- Digits (``0-9``) + +- Any of the characters ``_``, ``$``, ``(``, ``)``, ``+``, ``-``, and + ``/``. + +Trying to create a database that does not meet these requirements will +return an error quoting these restrictions. 
+ +To create the database ``recipes``: + +.. code-block:: http + + PUT http://couchdb:5984/recipes + Content-Type: application/json + +The returned content contains the JSON status: + +.. code-block:: javascript + + { + "ok" : true + } + +Anything should be treated as an error, and the problem should be taken +form the HTTP response code. + +``DELETE /db`` +============== + +* **Method**: ``DELETE /db`` +* **Request**: None +* **Response**: JSON success statement +* **Admin Privileges Required**: no +* **Return Codes**: + + * **200**: + Database has been deleted + + * **404**: + The requested content could not be found. The returned content will include + further information, as a JSON object, if available. + +Deletes the specified database, and all the documents and attachments +contained within it. + +To delete the database ``recipes`` you would send the request: + +.. code-block:: http + + DELETE http://couchdb:5984/recipes + Content-Type: application/json + +If successful, the returned JSON will indicate success + +.. code-block:: javascript + + { + "ok" : true + } + +.. 
_api-changes: + +``GET /db/_changes`` +==================== + +* **Method**: ``GET /db/_changes`` +* **Request**: None +* **Response**: JSON success statement +* **Admin Privileges Required**: no +* **Query Arguments**: + + * **Argument**: doc_ids + + * **Description**: Specify the list of documents IDs to be filtered + * **Optional**: yes + * **Type**: json + * **Default**: none + + * **Argument**: feed + + * **Description**: Type of feed + * **Optional**: yes + * **Type**: string + * **Default**: normal + * **Supported Values**: + + * **continuous**: Continuous (non-polling) mode + * **longpoll**: Long polling mode + * **normal**: Normal mode + + * **Argument**: filter + + * **Description**: Filter function from a design document to get updates + * **Optional**: yes + * **Type**: string + * **Default**: none + * **Supported Values**: + + * **Argument**: heartbeat + + * **Description**: Period after which an empty line is sent during longpoll + or continuous + * **Optional**: yes + * **Type**: numeric + * **Default**: 60000 + * **Quantity**: milliseconds + + * **Argument**: include_docs + + * **Description**: Include the document with the result + * **Optional**: yes + * **Type**: boolean + * **Default**: false + + * **Argument**: limit + + * **Description**: Maximum number of rows rows to return + * **Optional**: yes + * **Type**: numeric + * **Default**: none + + * **Argument**: since + + * **Description**: Start the results from changes immediately after the + specified sequence number + * **Optional**: yes + * **Type**: numeric + * **Default**: 0 + +Obtains a list of the changes made to the database. This can be used to +monitor for update and modifications to the database for post processing +or synchronization. There are three different types of supported changes +feeds, poll, longpoll, and continuous. All requests are poll requests by +default. You can select any feed type explicitly using the ``feed`` +query argument. 
+ +- **Poll** + + With polling you can request the changes that have occured since a + specific sequence number. This returns the JSON structure containing + the changed document information. When you perform a poll change + request, only the changes since the specific sequence number are + returned. For example, the query + + .. code-block:: http + + DELETE http://couchdb:5984/recipes/_changes + Content-Type: application/json + + Will get all of the changes in the database. You can request a + starting point using the ``since`` query argument and specifying the + sequence number. You will need to record the latest sequence number + in your client and then use this when making another request as the + new value to the ``since`` parameter. + +- **Longpoll** + + With long polling the request to the server will remain open until a + change is made on the database, when the changes will be reported, + and then the connection will close. The long poll is useful when you + want to monitor for changes for a specific purpose without wanting to + monitoring continuously for changes. + + Because the wait for a change can be significant you can set a + timeout before the connection is automatically closed (the + ``timeout`` argument). You can also set a heartbeat interval (using + the ``heartbeat`` query argument), which sends a newline to keep the + connection open. + +- **Continuous** + + Continuous sends all new changes back to the client immediately, + without closing the connection. In continuous mode the format of the + changes is slightly different to accommodate the continuous nature + while ensuring that the JSON output is still valid for each change + notification. + + As with the longpoll feed type you can set both the timeout and + heartbeat intervals to ensure that the connection is kept open for + new changes and updates. + +The return structure for ``normal`` and ``longpoll`` modes is a JSON +array of changes objects, and the last update sequence number. 
The +structure is described in the following table. + ++----------------------------------+-------------------------------------------+ +| Field | Description | ++==================================+===========================================+ +| last_seq | Last change sequence number. | ++----------------------------------+-------------------------------------------+ +| results [array] | Changes made to a database | ++----------------------------------+-------------------------------------------+ +| changes [array] | List of changes, field-by-field, for this | +| | document | ++----------------------------------+-------------------------------------------+ +| id | Document ID | ++----------------------------------+-------------------------------------------+ +| seq | Update sequence number | ++----------------------------------+-------------------------------------------+ + +The return format for ``continuous`` mode the server sends a ``CRLF`` +(carriage-return, linefeed) delimited line for each change. Each line +contains the `JSON object`_. + +You can also request the full contents of each document change (instead +of just the change notification) by using the ``include_docs`` +parameter. + +Filtering +--------- + +You can filter the contents of the changes feed in a number of ways. The +most basic way is to specify one or more document IDs to the query. This +causes the returned structure value to only contain changes for the +specified IDs. Note that the value of this query argument should be a +JSON formatted array. + +You can also filter the ``_changes`` feed by defining a filter function +within a design document. The specification for the filter is the same +as for replication filters. You specify the name of the filter function +to the ``filter`` parameter, specifying the design document name and +filter name. For example: + +.. 
code-block:: http + + GET /db/_changes?filter=design_doc/filtername + +The ``_changes`` feed can be used to watch changes to specific document +ID's or the list of ``_design`` documents in a database. If the +``filters`` parameter is set to ``_doc_ids`` a list of doc IDs can be +passed in the ``doc_ids`` parameter as a JSON array. For more +information, see :ref:`changes`. + +.. _api-compact: + +``POST /db/_compact`` +===================== + +* **Method**: ``POST /db/_compact`` +* **Request**: None +* **Response**: JSON success statement +* **Admin Privileges Required**: yes +* **Return Codes**: + + * **202**: + Compaction request has been accepted + + * **404**: + The requested content could not be found. The returned content will include + further information, as a JSON object, if available. + +Request compaction of the specified database. Compaction compresses the +disk database file by performing the following operations: + +- Writes a new version of the database file, removing any unused + sections from the new version during write. Because a new file is + temporary created for this purpose, you will need twice the current + storage space of the specified database in order for the compaction + routine to complete. + +- Removes old revisions of documents from the database, up to the + per-database limit specified by the ``_revs_limit`` database + parameter. See :ref:`api-get-db`. + +Compaction can only be requested on an individual database; you cannot +compact all the databases for a CouchDB instance. The compaction process +runs as a background process. + +You can determine if the compaction process is operating on a database +by obtaining the database meta information, the ``compact_running`` +value of the returned database structure will be set to true. See +:ref:`api-get-db`. + +You can also obtain a list of running processes to determine whether +compaction is currently running. See :ref:`active-tasks`. + +.. 
_api-compact-ddoc: + +``POST /db/_compact/design-doc`` +================================ + +* **Method**: ``POST /db/_compact/design-doc`` +* **Request**: None +* **Response**: JSON success statement +* **Admin Privileges Required**: yes +* **Return Codes**: + + * **202**: + Compaction request has been accepted + + * **404**: + The requested content could not be found. The returned content will include + further information, as a JSON object, if available. + +Compacts the view indexes associated with the specified design document. +You can use this in place of the full database compaction if you know a +specific set of view indexes have been affected by a recent database +change. + +For example, to compact the views associated with the ``recipes`` design +document: + +.. code-block:: http + + POST http://couchdb:5984/recipes/_compact/recipes + Content-Type: application/json + +CouchDB will immediately return with a status indicating that the +compaction request has been received (HTTP status code 202): + +.. code-block:: javascript + + { + "ok" : true + } + + +``POST /db/_view_cleanup`` +========================== + +* **Method**: ``POST /db/_view_cleanup`` +* **Request**: None +* **Response**: JSON success statement +* **Admin Privileges Required**: yes + +Cleans up the cached view output on disk for a given view. For example: + +.. code-block:: http + + POST http://couchdb:5984/recipes/_view_cleanup + Content-Type: application/json + +If the request is successful, a basic status message us returned: + +.. code-block:: javascript + + { + "ok" : true + } + + +``POST /db/_ensure_full_commit`` +================================ + +* **Method**: ``POST /db/_ensure_full_commit`` +* **Request**: None +* **Response**: JSON success statement +* **Admin Privileges Required**: no +* **Return Codes**: + + * **202**: + Commit completed successfully + + * **404**: + The requested content could not be found. 
The returned content will include + further information, as a JSON object, if available. + + +Commits any recent changes to the specified database to disk. You should +call this if you want to ensure that recent changes have been written. +For example, to commit all the changes to disk for the database +``recipes`` you would use: + +.. code-block:: http + + POST http://couchdb:5984/recipes/_ensure_full_commit + Content-Type: application/json + +This returns a status message, containing the success message and the +timestamp for when the CouchDB instance was started: + +.. code-block:: javascript + + { + "ok" : true, + "instance_start_time" : "1288186189373361" + } + +``POST /db/_bulk_docs`` +======================= + +* **Method**: ``POST /db/_bulk_docs`` +* **Request**: JSON of the docs and updates to be applied +* **Response**: JSON success statement +* **Admin Privileges Required**: no +* **Return Codes**: + + * **201**: + Document(s) have been created or updated + +The bulk document API allows you to create and update multiple documents +at the same time within a single request. The basic operation is similar +to creating or updating a single document, except that you batch the +document structure and information and . When creating new documents the +document ID is optional. For updating existing documents, you must +provide the document ID, revision information, and new document values. 
+ +For both inserts and updates the basic structure of the JSON is the +same: + ++----------------------------------+-------------------------------------------+ +| Field | Description | ++==================================+===========================================+ +| all_or_nothing (optional) | Sets the database commit mode to use | +| | all-or-nothing semantics | ++----------------------------------+-------------------------------------------+ +| docs [array] | Bulk Documents Document | ++----------------------------------+-------------------------------------------+ +| _id (optional) | List of changes, field-by-field, for this | +| | document | ++----------------------------------+-------------------------------------------+ +| _rev (optional) | Document ID | ++----------------------------------+-------------------------------------------+ +| _deleted (optional) | Update sequence number | ++----------------------------------+-------------------------------------------+ + +Inserting Documents in Bulk +--------------------------- + +To insert documents in bulk into a database you need to supply a JSON +structure with the array of documents that you want to add to the +database. Using this method you can either include a document ID, or +allow the document ID to be automatically generated. + +For example, the following inserts three new documents, two with the +supplied document IDs, and one which will have a document ID generated: + +.. 
code-block:: javascript + + { + "docs" : [ + { + "_id" : "FishStew", + "servings" : 4, + "subtitle" : "Delicious with fresh bread", + "title" : "Fish Stew" + }, + { + "_id" : "LambStew", + "servings" : 6, + "subtitle" : "Delicious with scone topping", + "title" : "Lamb Stew" + }, + { + "servings" : 8, + "subtitle" : "Delicious with suet dumplings", + "title" : "Beef Stew" + } + ] + } + + +The return type from a bulk insertion will be 201, with the content of +the returned structure indicating specific success or otherwise messages +on a per-document basis. + +The return structure from the example above contains a list of the +documents created, here with their document IDs and revision IDs: + +.. code-block:: http + + POST http://couchdb:5984/recipes/_bulk_docs + Content-Type: application/json + + [ + { + "id" : "FishStew", + "rev" : "1-9c65296036141e575d32ba9c034dd3ee" + }, + { + "id" : "LambStew", + "rev" : "1-34c318924a8f327223eed702ddfdc66d" + }, + { + "id" : "7f7638c86173eb440b8890839ff35433", + "rev" : "1-857c7cbeb6c8dd1dd34a0c73e8da3c44" + } + ] + + +The content and structure of the returned JSON will depend on the transaction +semantics being used for the bulk update; see :ref:`bulk-semantics` for more +information. Conflicts and validation errors when updating documents in +bulk must be handled separately; see :ref:`bulk-validation`. + +Updating Documents in Bulk +-------------------------- + +The bulk document update procedure is similar to the insertion +procedure, except that you must specify the document ID and current +revision for every document in the bulk update JSON string. + +For example, you could send the following request: + +.. 
code-block:: http + + POST http://couchdb:5984/recipes/_bulk_docs + Content-Type: application/json + + { + "docs" : [ + { + "_id" : "FishStew", + "_rev" : "1-9c65296036141e575d32ba9c034dd3ee", + "servings" : 4, + "subtitle" : "Delicious with freshly baked bread", + "title" : "Fish Stew" + }, + { + "_id" : "LambStew", + "_rev" : "1-34c318924a8f327223eed702ddfdc66d", + "servings" : 6, + "subtitle" : "Serve with a wholemeal scone topping", + "title" : "Lamb Stew" + }, + { + "_id" : "7f7638c86173eb440b8890839ff35433", + "_rev" : "1-857c7cbeb6c8dd1dd34a0c73e8da3c44", + "servings" : 8, + "subtitle" : "Hand-made dumplings make a great accompaniment", + "title" : "Beef Stew" + } + ] + } + +The return structure is the JSON of the updated documents, with the new +revision and ID information: + +.. code-block:: javascript + + [ + { + "id" : "FishStew", + "rev" : "2-e7af4c4e9981d960ecf78605d79b06d1" + }, + { + "id" : "LambStew", + "rev" : "2-0786321986194c92dd3b57dfbfc741ce" + }, + { + "id" : "7f7638c86173eb440b8890839ff35433", + "rev" : "2-bdd3bf3563bee516b96885a66c743f8e" + } + ] + +You can optionally delete documents during a bulk update by adding the +``_deleted`` field with a value of ``true`` to each document ID/revision +combination within the submitted JSON structure. + +The return type from a bulk insertion will be 201, with the content of +the returned structure indicating specific success or otherwise messages +on a per-document basis. + +The content and structure of the returned JSON will depend on the transaction +semantics being used for the bulk update; see :ref:`bulk-semantics` for more +information. Conflicts and validation errors when updating documents in +bulk must be handled separately; see :ref:`bulk-validation`. + +.. _bulk-semantics: + +Bulk Documents Transaction Semantics +------------------------------------ + +CouchDB supports two different modes for updating (or inserting) +documents using the bulk document system. 
Each mode affects both +the state of the documents in the event of system failure, and the level +of conflict checking performed on each document. The two modes are: + +- ``non-atomic`` + + The default mode is non-atomic, that is, CouchDB will only guarantee + that some of the documents will be saved when you send the request. + The response will contain the list of documents successfully inserted + or updated during the process. In the event of a crash, some of the + documents may have been successfully saved, and some will have been + lost. + + In this mode, the response structure will indicate whether the + document was updated by supplying the new ``_rev`` parameter + indicating a new document revision was created. If the update failed, + then you will get an ``error`` of type ``conflict``. For example: + + .. code-block:: javascript + + [ + { + "id" : "FishStew", + "error" : "conflict", + "reason" : "Document update conflict." + }, + { + "id" : "LambStew", + "error" : "conflict", + "reason" : "Document update conflict." + }, + { + "id" : "7f7638c86173eb440b8890839ff35433", + "error" : "conflict", + "reason" : "Document update conflict." + } + ] + + + In this case no new revision has been created and you will need to + submit the document update, with the correct revision tag, to update + the document. + +- ``all-or-nothing`` + + In all-or-nothing mode, either all documents are written to the + database, or no documents are written to the database, in the event + of a system failure during commit. + + In addition, the per-document conflict checking is not performed. + Instead a new revision of the document is created, even if the new + revision is in conflict with the current revision in the database. + The returned structure contains the list of documents with new + revisions: + + .. 
code-block:: javascript + + [ + { + "id" : "FishStew", + "rev" : "2-e7af4c4e9981d960ecf78605d79b06d1" + }, + { + "id" : "LambStew", + "rev" : "2-0786321986194c92dd3b57dfbfc741ce" + }, + { + "id" : "7f7638c86173eb440b8890839ff35433", + "rev" : "2-bdd3bf3563bee516b96885a66c743f8e" + } + ] + + When updating documents using this mode the revision of a document + included in views will be arbitrary. You can check the conflict + status for a document by using the ``conflicts=true`` query argument + when accessing the view. Conflicts should be handled individually to + ensure the consistency of your database. + + To use this mode, you must include the ``all_or_nothing`` field (set + to true) within the main body of the JSON of the request. + +The effects of different database operations on the different modes are +summarized below: + +* **Transaction Mode**: ``Non-atomic`` + + * **Transaction**: ``Insert`` + + * **Cause**: Requested document ID already exists + * **Resolution**: Resubmit with different document ID, or update the + existing document + + * **Transaction**: ``Update`` + + * **Cause**: Revision missing or incorrect + * **Resolution**: Resubmit with correct revision + +* **Transaction Mode**: ``All-or-nothing`` + + * **Transaction**: ``Insert`` / ``Update`` + + * **Cause**: Additional revision inserted + * **Resolution**: Resolve conflicted revisions + +Replication of documents is independent of the type of insert or update. +The documents and revisions created during a bulk insert or update are +replicated in the same way as any other document. This can mean that if +you make use of the all-or-nothing mode the exact list of documents, +revisions (and their conflict state) may or may not be replicated to +other databases correctly. + +.. 
_bulk-validation: + +Bulk Document Validation and Conflict Errors +-------------------------------------------- + +The JSON returned by the ``_bulk_docs`` operation consists of an array +of JSON structures, one for each document in the original submission. +The returned JSON structure should be examined to ensure that all of the +documents submitted in the original request were successfully added to +the database. + +The exact structure of the returned information is: + ++----------------------------------+-------------------------------------------+ +| Field | Description | ++==================================+===========================================+ +| docs [array] | Bulk Documents Document | ++----------------------------------+-------------------------------------------+ +| id | Document ID | ++----------------------------------+-------------------------------------------+ +| error | Error type | ++----------------------------------+-------------------------------------------+ +| reason | Error string with extended reason | ++----------------------------------+-------------------------------------------+ + +When a document (or document revision) is not correctly committed to the +database because of an error, you should check the ``error`` field to +determine error type and course of action. Errors will be one of the +following type: + +- ``conflict`` + + The document as submitted is in conflict. If you used the default + bulk transaction mode then the new revision will not have been + created and you will need to re-submit the document to the database. + If you used ``all-or-nothing`` mode then you will need to manually + resolve the conflicted revisions of the document. + + Conflict resolution of documents added using the bulk docs interface + is identical to the resolution procedures used when resolving + conflict errors during replication. 
+ +- ``forbidden`` + + Entries with this error type indicate that the validation routine + applied to the document during submission has returned an error. + + For example, if your validation routine includes the following: + + .. code-block:: javascript + + throw({forbidden: 'invalid recipe ingredient'}); + + The error returned will be: + + .. code-block:: javascript + + { + "id" : "7f7638c86173eb440b8890839ff35433", + "error" : "forbidden", + "reason" : "invalid recipe ingredient" + } + + +``POST /db/_temp_view`` +======================= + +* **Method**: ``POST /db/_temp_view`` +* **Request**: JSON with the temporary view definition +* **Response**: Temporary view result set +* **Admin Privileges Required**: yes + +Creates (and executes) a temporary view based on the view function +supplied in the JSON request. For example: + +.. code-block:: http + + POST http://couchdb:5984/recipes/_temp_view + Content-Type: application/json + + { + "map" : "function(doc) { if (doc.value > 9995) { emit(null, doc.value); } }" + } + +The resulting JSON response is the result from the execution of the +temporary view: + +.. code-block:: javascript + + { + "total_rows" : 3, + "rows" : [ + { + "value" : 9998.41913029012, + "id" : "05361cc6aa42033878acc1bacb1f39c2", + "key" : null + }, + { + "value" : 9998.94149934853, + "id" : "1f443f471e5929dd7b252417625ed170", + "key" : null + }, + { + "value" : 9998.01511339154, + "id" : "1f443f471e5929dd7b252417629c102b", + "key" : null + } + ], + "offset" : 0 + } + +The arguments also available to standard view requests also apply to +temporary views, but the execution of the view may take some time as it +relies on being executed at the time of the request. In addition to the +time taken, they are also computationally very expensive to produce. You +should use a defined view if you want to achieve the best performance. 
+ +``POST /db/_purge`` +=================== + +* **Method**: ``POST /db/_purge`` +* **Request**: JSON of the document IDs/revisions to be purged +* **Response**: JSON structure with purged documents and purge sequence +* **Admin Privileges Required**: no + +Occasionally something ends up in your database that should never have been +written to it, like a password or private key. Purge can help you here. + +A database purge permanently removes the references to deleted documents +from the database. Deleting a document within CouchDB does not actually +remove the document from the database, instead, the document is marked as +deleted (and a new revision is created). This is to ensure that +deleted documents are replicated to other databases as having been +deleted. This also means that you can check the status of a document and +identify that the document has been deleted. + +.. note:: + + Don't use purge as a regular operation, it is designed to be used + in exceptional cases. + +The purge operation removes the references to the deleted documents from +the database. The purging of old documents is not replicated to other +databases. If you are replicating between databases and have deleted a +large number of documents you should run purge on each database. + +.. note:: + + Purging documents does not remove the space used by them on disk. To + reclaim disk space, you should run a database compact (see + :ref:`api-compact`), and compact views (see :ref:`api-compact-ddoc`). + +To perform a purge operation you must send a request including the JSON +of the document IDs that you want to purge. For example: + +.. code-block:: http + + POST http://couchdb:5984/recipes/_purge + Content-Type: application/json + + { + "FishStew" : [ + "17-b3eb5ac6fbaef4428d712e66483dcb79" + ] + } + +The format of the request must include the document ID and one or more +revisions that must be purged. 
+ +The response will contain the purge sequence number, and a list of the +document IDs and revisions successfully purged. + +.. code-block:: javascript + + { + "purged" : { + "FishStew" : [ + "17-b3eb5ac6fbaef4428d712e66483dcb79" + ] + }, + "purge_seq" : 11 + } + +Updating Indexes +---------------- + +The number of purges on a database is tracked using a purge sequence. +This is used by the view indexer to optimize the updating of views that +contain the purged documents. + +When the indexer identifies that the purge sequence on a database has +changed, it compares the purge sequence of the database with that stored +in the view index. If the difference between the stored sequence and the +database sequence is only 1, then the indexer uses a cached list of +the most recently purged documents, and then removes these documents +from the index individually. This prevents completely rebuilding the +index from scratch. + +If the difference between the stored sequence number and current +database sequence is greater than 1, then the view index is entirely +rebuilt. This is an expensive operation as every document in the +database must be examined. 
+ +``GET /db/_all_docs`` +===================== + +* **Method**: ``GET /db/_all_docs`` +* **Request**: None +* **Response**: JSON object containing document information, ordered by the + document ID +* **Admin Privileges Required**: no +* **Query Arguments**: + + * **Argument**: descending + + * **Description**: Return the documents in descending by key order + * **Optional**: yes + * **Type**: boolean + * **Default**: false + + * **Argument**: endkey + + * **Description**: Stop returning records when the specified key is reached + * **Optional**: yes + * **Type**: string + + * **Argument**: endkey_docid + + * **Description**: Stop returning records when the specified document ID is + reached + * **Optional**: yes + * **Type**: string + + * **Argument**: group + + * **Description**: Group the results using the reduce function to a group + or single row + * **Optional**: yes + * **Type**: boolean + * **Default**: false + + * **Argument**: group_level + + * **Description**: Specify the group level to be used + * **Optional**: yes + * **Type**: numeric + + * **Argument**: include_docs + + * **Description**: Include the full content of the documents in the return + * **Optional**: yes + * **Type**: boolean + * **Default**: false + + * **Argument**: inclusive_end + + * **Description**: Specifies whether the specified end key should be + included in the result + * **Optional**: yes + * **Type**: boolean + * **Default**: true + + * **Argument**: key + + * **Description**: Return only documents that match the specified key + * **Optional**: yes + * **Type**: string + + * **Argument**: limit + + * **Description**: Limit the number of the returned documents to the + specified number + * **Optional**: yes + * **Type**: numeric + + * **Argument**: reduce + + * **Description**: Use the reduction function + * **Optional**: yes + * **Type**: boolean + * **Default**: true + + * **Argument**: skip + + * **Description**: Skip this number of records before starting to return + the 
results + * **Optional**: yes + * **Type**: numeric + * **Default**: 0 + + * **Argument**: stale + + * **Description**: Allow the results from a stale view to be used + * **Optional**: yes + * **Type**: string + * **Default**: + * **Supported Values**: + + * **ok**: Allow stale views + + * **Argument**: startkey + + * **Description**: Return records starting with the specified key + * **Optional**: yes + * **Type**: string + + * **Argument**: startkey_docid + + * **Description**: Return records starting with the specified document ID + * **Optional**: yes + * **Type**: string + +Returns a JSON structure of all of the documents in a given database. +The information is returned as a JSON structure containing meta +information about the return structure, and the list documents and basic +contents, consisting the ID, revision and key. The key is generated from +the document ID. + ++----------------------------------+-------------------------------------------+ +| Field | Description | ++==================================+===========================================+ +| offset | Offset where the document list started | ++----------------------------------+-------------------------------------------+ +| rows [array] | Array of document object | ++----------------------------------+-------------------------------------------+ +| total_rows | Number of documents in the database/view | ++----------------------------------+-------------------------------------------+ +| update_seq | Current update sequence for the database | ++----------------------------------+-------------------------------------------+ + +By default the information returned contains only the document ID and +revision. For example, the request: + +.. code-block:: http + + GET http://couchdb:5984/recipes/_all_docs + Accept: application/json + +Returns the following structure: + +.. 
code-block:: javascript + + { + "total_rows" : 18386, + "rows" : [ + { + "value" : { + "rev" : "1-bc0d5aed1e339b1cc1f29578f3220a45" + }, + "id" : "Aberffrawcake", + "key" : "Aberffrawcake" + }, + { + "value" : { + "rev" : "3-68a20c89a5e70357c20148f8e82ca331" + }, + "id" : "Adukiandorangecasserole-microwave", + "key" : "Adukiandorangecasserole-microwave" + }, + { + "value" : { + "rev" : "3-9b2851ed9b6f655cc4eb087808406c60" + }, + "id" : "Aioli-garlicmayonnaise", + "key" : "Aioli-garlicmayonnaise" + }, + ... + ], + "offset" : 0 + } + +The information is returned in the form of a temporary view of all the +database documents, with the returned key consisting of the ID of the +document. The remainder of the interface is therefore identical to the +View query arguments and their behavior. + +``POST /db/_all_docs`` +====================== + +* **Method**: ``POST /db/_all_docs`` +* **Request**: JSON of the document IDs you want included +* **Response**: JSON of the returned view +* **Admin Privileges Required**: no + +The ``POST`` to ``_all_docs`` allows to specify multiple keys to be +selected from the database. This enables you to request multiple +documents in a single request, in place of multiple +:ref:`api-get-doc` requests. + +The request body should contain a list of the keys to be returned as an +array to a ``keys`` object. For example: + +.. code-block:: http + + POST http://couchdb:5984/recipes/_all_docs + User-Agent: MyApp/0.1 libwww-perl/5.837 + + { + "keys" : [ + "Zingylemontart", + "Yogurtraita" + ] + } + +The return JSON is the all documents structure, but with only the +selected keys in the output: + +.. 
code-block:: javascript + + { + "total_rows" : 2666, + "rows" : [ + { + "value" : { + "rev" : "1-a3544d296de19e6f5b932ea77d886942" + }, + "id" : "Zingylemontart", + "key" : "Zingylemontart" + }, + { + "value" : { + "rev" : "1-91635098bfe7d40197a1b98d7ee085fc" + }, + "id" : "Yogurtraita", + "key" : "Yogurtraita" + } + ], + "offset" : 0 + } + +``POST /db/_missing_revs`` +========================== + +* **Method**: ``POST /db/_missing_revs`` +* **Request**: JSON list of document revisions +* **Response**: JSON of missing revisions +* **Admin Privileges Required**: no + +``POST /db/_revs_diff`` +======================= + +* **Method**: ``POST /db/_revs_diff`` +* **Request**: JSON list of document revisions +* **Response**: JSON list of differences from supplied document/revision list +* **Admin Privileges Required**: no + +``GET /db/_security`` +===================== + +* **Method**: ``GET /db/_security`` +* **Request**: None +* **Response**: JSON of the security object +* **Admin Privileges Required**: no + +Gets the current security object from the specified database. The +security object consists of two compulsory elements, ``admins`` and +``readers``, which are used to specify the list of users and/or roles +that have admin and reader rights to the database respectively. Any +additional fields in the security object are optional. The entire +security object is made available to validation and other internal +functions so that the database can control and limit functionality. + +To get the existing security object you would send the following +request: + +.. 
code-block:: javascript + + { + "admins" : { + "roles" : [], + "names" : [ + "mc", + "slp" + ] + }, + "readers" : { + "roles" : [], + "names" : [ + "tim", + "brian" + ] + } + } + +Security object structure is: + +* **admins**: Roles/Users with admin privileges + + * **roles** [array]: List of roles with parent privilege + * **users** [array]: List of users with parent privilege + +* **readers**: Roles/Users with reader privileges + + * **roles** [array]: List of roles with parent privilege + * **users** [array]: List of users with parent privilege + +.. note:: + If the security object for a database has never been set, then the + value returned will be empty. + +``PUT /db/_security`` +===================== + +* **Method**: ``PUT /db/_security`` +* **Request**: JSON specifying the admin and user security for the database +* **Response**: JSON status message +* **Admin Privileges Required**: no + +Sets the security object for the given database.For example, to set the +security object for the ``recipes`` database: + +.. code-block:: javascript + + PUT http://couchdb:5984/recipes/_security + Content-Type: application/json + + { + "admins" : { + "roles" : [], + "names" : [ + "mc", + "slp" + ] + }, + "readers" : { + "roles" : [], + "names" : [ + "tim", + "brian" + ] + } + } + +If the setting was successful, a JSON status object will be returned: + +.. code-block:: javascript + + { + "ok" : true + } + +``GET /db/_revs_limit`` +======================= + +* **Method**: ``GET /db/_revs_limit`` +* **Request**: None +* **Response**: The current revision limit setting +* **Admin Privileges Required**: no + + +Gets the current ``revs_limit`` (revision limit) setting. + +For example to get the current limit: + +.. code-block:: http + + GET http://couchdb:5984/recipes/_revs_limit + Content-Type: application/json + +The returned information is the current setting as a numerical scalar: + +.. 
code-block:: javascript + + 1000 + +``PUT /db/_revs_limit`` +======================= + +* **Method**: ``PUT /db/_revs_limit`` +* **Request**: A scalar integer of the revision limit setting +* **Response**: Confirmation of setting of the revision limit +* **Admin Privileges Required**: no + +Sets the maximum number of document revisions that will be tracked by +CouchDB, even after compaction has occurred. You can set the revision +limit on a database by using ``PUT`` with a scalar integer of the limit +that you want to set as the request body. + +For example to set the revs limit to 100 for the ``recipes`` database: + +.. code-block:: http + + PUT http://couchdb:5984/recipes/_revs_limit + Content-Type: application/json + + 100 + +If the setting was successful, a JSON status object will be returned: + +.. code-block:: javascript + + { + "ok" : true + } + +.. _JSON object: #table-couchdb-api-db_db-json-changes diff -Nru couchdb-1.2.0/share/doc/build/html/_sources/api/design.txt couchdb-1.4.0~rc.1/share/doc/build/html/_sources/api/design.txt --- couchdb-1.2.0/share/doc/build/html/_sources/api/design.txt 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/_sources/api/design.txt 2013-08-23 10:57:21.000000000 -0400 @@ -0,0 +1,1264 @@ +.. Licensed under the Apache License, Version 2.0 (the "License"); you may not +.. use this file except in compliance with the License. You may obtain a copy of +.. the License at +.. +.. http://www.apache.org/licenses/LICENSE-2.0 +.. +.. Unless required by applicable law or agreed to in writing, software +.. distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +.. WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +.. License for the specific language governing permissions and limitations under +.. the License. + +.. 
_api-design: + +======================= +Design Document Methods +======================= + +In CouchDB, design documents provide the main interface for building a +CouchDB application. The design document defines the views used to +extract information from CouchDB through one or more views. Design +documents are created within your CouchDB instance in the same way as +you create database documents, but the content and definition of the +documents is different. Design Documents are named using an ID defined +with the design document URL path, and this URL can then be used to +access the database contents. + +Views and lists operate together to provide automated (and formatted) +output from your database. + +A list of the available methods and URL paths are provided below: + +Design Document API Calls + +``GET /db/_design/design-doc`` +============================== + +* **Method**: ``GET /db/_design/design-doc`` +* **Request**: None +* **Response**: JSON of the existing design document +* **Admin Privileges Required**: no +* **Query Arguments**: + + * **Argument**: rev + + * **Description**: Specify the revision to return + * **Optional**: yes + * **Type**: string + + * **Argument**: revs + + * **Description**: Return a list of the revisions for the document + * **Optional**: yes + * **Type**: boolean + * **Supported Values**: + + * **true**: Includes the revisions + + * **Argument**: revs_info + + * **Description**: Return a list of detailed revision information for the + document + * **Optional**: yes + * **Type**: boolean + * **Supported Values**: + + * **true**: Includes the revisions + +Returns the specified design document, ``design-doc`` from the specified +``db``. For example, to retrieve the design document ``recipes`` you +would send the following request: + +.. code-block:: http + + GET http://couchdb:5984/recipes/_design/recipes + Content-Type: application/json + +The returned string will be the JSON of the design document: + +.. 
code-block:: javascript + + { + "_id" : "_design/recipes", + "_rev" : "5-39f56a392b86bbee57e2138921346406", + "language" : "javascript", + "views" : { + "by_recipe" : { + "map" : "function(doc) { if (doc.title != null) emit(doc.title, doc) }" + } + } + } + +A list of the revisions can be obtained by using the ``revs`` query +argument, or an extended list of revisions using the ``revs_info`` query +argument. This operates in the same way as for other documents. For +further examples, see :ref:`api-get-doc`. + +``PUT /db/_design/design-doc`` +============================== + +* **Method**: ``PUT /db/_design/design-doc`` +* **Request**: JSON of the design document +* **Response**: JSON status +* **Admin Privileges Required**: no + +Upload the specified design document, ``design-doc``, to the specified +database. The design document should follow the definition of a design +document, as summarised in the following table. + +* **_id**: Design Document ID +* **_rev**: Design Document Revision +* **views**: View + + * **viewname**: View Definition + + * **map**: Map Function for View + * **reduce (optional)**: Reduce Function for View + +For more information on writing views, see :ref:`views`. + +``DELETE /db/_design/design-doc`` +================================= + +* **Method**: ``DELETE /db/_design/design-doc`` +* **Request**: None +* **Response**: JSON of deleted design document +* **Admin Privileges Required**: no +* **Query Arguments**: + + * **Argument**: rev + + * **Description**: Current revision of the document for validation + * **Optional**: yes + * **Type**: string + +* **HTTP Headers** + + * **Header**: ``If-Match`` + + * **Description**: Current revision of the document for validation + * **Optional**: yes + +* **Return Codes**: + + * **409**: + Supplied revision is incorrect or missing + +Delete an existing design document. 
Deleting a design document also +deletes all of the associated view indexes, and recovers the +corresponding space on disk for the indexes in question. + +To delete, you must specify the current revision of the design document +using the ``rev`` query argument. + +For example: + +.. code-block:: http + + DELETE http://couchdb:5984/recipes/_design/recipes?rev=2-ac58d589b37d01c00f45a4418c5a15a8 + Content-Type: application/json + +The response contains the delete document ID and revision: + +.. code-block:: javascript + + { + "id" : "recipe/_design/recipes" + "ok" : true, + "rev" : "3-7a05370bff53186cb5d403f861aca154", + } + +``COPY /db/_design/design-doc`` +=============================== + +* **Method**: ``COPY /db/_design/design-doc`` +* **Request**: None +* **Response**: JSON of the new document and revision +* **Admin Privileges Required**: no +* **Query Arguments**: + + * **Argument**: rev + + * **Description**: Revision to copy from + * **Optional**: yes + * **Type**: string + +* **HTTP Headers** + + * **Header**: ``Destination`` + + * **Description**: Destination document (and optional revision) + * **Optional**: no + +The ``COPY`` command (non-standard HTTP) copies an existing design +document to a new or existing document. + +The source design document is specified on the request line, with the +``Destination`` HTTP Header of the request specifying the target +document. + +Copying a Design Document +------------------------- + +To copy the latest version of a design document to a new document you +specify the base document and target document: + +.. code-block:: http + + COPY http://couchdb:5984/recipes/_design/recipes + Content-Type: application/json + Destination: /recipes/_design/recipelist + +The above request copies the design document ``recipes`` to the new +design document ``recipelist``. The response is the ID and revision of +the new document. + +.. 
code-block:: javascript + + { + "id" : "recipes/_design/recipelist", + "rev" : "1-9c65296036141e575d32ba9c034dd3ee" + } + +.. note:: + Copying a design document does not automatically reconstruct the view + indexes. These will be recreated, as with other views, the first + time the new view is accessed. + +Copying from a Specific Revision +-------------------------------- + +To copy *from* a specific version, use the ``rev`` argument to the query +string: + +.. code-block:: http + + COPY http://couchdb:5984/recipes/_design/recipes?rev=1-e23b9e942c19e9fb10ff1fde2e50e0f5 + Content-Type: application/json + Destination: recipes/_design/recipelist + +The new design document will be created using the specified revision of +the source document. + +Copying to an Existing Design Document +-------------------------------------- + +To copy to an existing document, you must specify the current revision +string for the target document, using the ``rev`` parameter to the +``Destination`` HTTP Header string. For example: + +.. code-block:: http + + COPY http://couchdb:5984/recipes/_design/recipes + Content-Type: application/json + Destination: recipes/_design/recipelist?rev=1-9c65296036141e575d32ba9c034dd3ee + +The return value will be the new revision of the copied document: + +.. code-block:: javascript + + { + "id" : "recipes/_design/recipes", + "rev" : "2-55b6a1b251902a2c249b667dab1c6692" + } + +``GET /db/_design/design-doc/attachment`` +========================================= + +* **Method**: ``GET /db/_design/design-doc/attachment`` +* **Request**: None +* **Response**: Returns the attachment data +* **Admin Privileges Required**: no + +Returns the file attachment ``attachment`` associated with the design +document ``/_design/design-doc``. The raw data of the associated +attachment is returned (just as if you were accessing a static file). 
The +returned HTTP ``Content-type`` will be the same as the content type set +when the document attachment was submitted into the database. + +``PUT /db/_design/design-doc/attachment`` +========================================= + +* **Method**: ``PUT /db/_design/design-doc/attachment`` +* **Request**: Raw document data +* **Response**: JSON document status +* **Admin Privileges Required**: no +* **Query Arguments**: + + * **Argument**: rev + + * **Description**: Current document revision + * **Optional**: no + * **Type**: string + +* **HTTP Headers** + + * **Header**: ``Content-Length`` + + * **Description**: Length (bytes) of the attachment being uploaded + * **Optional**: no + + * **Header**: ``Content-Type`` + + * **Description**: MIME type for the uploaded attachment + * **Optional**: no + + * **Header**: ``If-Match`` + + * **Description**: Current revision of the document for validation + * **Optional**: yes + +Upload the supplied content as an attachment to the specified design +document (``/_design/design-doc``). The ``attachment`` name provided +must be a URL encoded string. You must also supply either the ``rev`` +query argument or the ``If-Match`` HTTP header for validation, and the +HTTP headers (to set the attacment content type). The content type is +used when the attachment is requested as the corresponding content-type +in the returned document header. + +For example, you could upload a simple text document using the following +request: + +.. code-block:: http + + PUT http://couchdb:5984/recipes/_design/recipes/view.css?rev=7-f7114d4d81124b223283f3e89eee043e + Content-Length: 39 + Content-Type: text/plain + + div.recipetitle { + font-weight: bold; + } + +Or by using the ``If-Match`` HTTP header: + +.. 
code-block:: http + + PUT http://couchdb:5984/recipes/FishStew/basic + If-Match: 7-f7114d4d81124b223283f3e89eee043e + Content-Length: 39 + Content-Type: text/plain + + div.recipetitle { + font-weight: bold; + } + +The returned JSON contains the new document information: + +.. code-block:: javascript + + { + "id" : "_design/recipes" + "ok" : true, + "rev" : "8-cb2b7d94eeac76782a02396ba70dfbf5", + } + +.. note:: + Uploading an attachment updates the corresponding document revision. + Revisions are tracked for the parent document, not individual attachments. + +``DELETE /db/_design/design-doc/attachment`` +============================================ + +* **Method**: ``DELETE /db/_design/design-doc/attachment`` +* **Request**: None +* **Response**: JSON status +* **Admin Privileges Required**: no +* **Query Arguments**: + + * **Argument**: rev + + * **Description**: Current document revision + * **Optional**: no + * **Type**: string + +* **HTTP Headers** + + * **Header**: ``If-Match`` + + * **Description**: Current revision of the document for validation + * **Optional**: yes + +* **Return Codes**: + + * **200**: + Attachment deleted successfully + * **409**: + Supplied revision is incorrect or missing + +Deletes the attachment ``attachment`` to the specified +``_design/design-doc``. You must supply the ``rev`` argument with the +current revision to delete the attachment. + +For example to delete the attachment ``view.css`` from the design +document ``recipes``: + +.. code-block:: http + + DELETE http://couchdb:5984/recipes/_design/recipes/view.css?rev=9-3db559f13a845c7751d407404cdeaa4a + +The returned JSON contains the updated revision information for the +parent document: + +.. 
code-block:: javascript + + { + "id" : "_design/recipes" + "ok" : true, + "rev" : "10-f3b15bb408961f8dcc3d86c7d3b54c4c", + } + +``GET /db/_design/design-doc/_info`` +==================================== + +* **Method**: ``GET /db/_design/design-doc/_info`` +* **Request**: None +* **Response**: JSON of the design document information +* **Admin Privileges Required**: no + +Obtains information about a given design document, including the index, +index size and current status of the design document and associated +index information. + +For example, to get the information for the ``recipes`` design document: + +.. code-block:: http + + GET http://couchdb:5984/recipes/_design/recipes/_info + Content-Type: application/json + +This returns the following JSON structure: + +.. code-block:: javascript + + { + "name" : "recipes" + "view_index" : { + "compact_running" : false, + "updater_running" : false, + "language" : "javascript", + "purge_seq" : 10, + "waiting_commit" : false, + "waiting_clients" : 0, + "signature" : "fc65594ee76087a3b8c726caf5b40687", + "update_seq" : 375031, + "disk_size" : 16491 + }, + } + +The individual fields in the returned JSON structure are detailed below: + +* **name**: Name/ID of Design Document +* **view_index**: View Index + + * **compact_running**: Indicates whether a compaction routine is currently + running on the view + * **disk_size**: Size in bytes of the view as stored on disk + * **language**: Language for the defined views + * **purge_seq**: The purge sequence that has been processed + * **signature**: MD5 signature of the views for the design document + * **update_seq**: The update sequence of the corresponding database that + has been indexed + * **updater_running**: Indicates if the view is currently being updated + * **waiting_clients**: Number of clients waiting on views from this design + document + * **waiting_commit**: Indicates if there are outstanding commits to the + underlying database that need to processed + +.. 
_api-get-view: + +.. _views: + +``GET /db/_design/design-doc/_view/view-name`` +============================================== + +* **Method**: ``GET /db/_design/design-doc/_view/view-name`` +* **Request**: None +* **Response**: JSON of the documents returned by the view +* **Admin Privileges Required**: no +* **Query Arguments**: + + * **Argument**: descending + + * **Description**: Return the documents in descending by key order + * **Optional**: yes + * **Type**: boolean + * **Default**: false + + * **Argument**: endkey + + * **Description**: Stop returning records when the specified key is reached + * **Optional**: yes + * **Type**: string + + * **Argument**: endkey_docid + + * **Description**: Stop returning records when the specified document + ID is reached + * **Optional**: yes + * **Type**: string + + * **Argument**: group + + * **Description**: Group the results using the reduce function to a + group or single row + * **Optional**: yes + * **Type**: boolean + * **Default**: false + + * **Argument**: group_level + + * **Description**: Specify the group level to be used + * **Optional**: yes + * **Type**: numeric + + * **Argument**: include_docs + + * **Description**: Include the full content of the documents in the return + * **Optional**: yes + * **Type**: boolean + * **Default**: false + + * **Argument**: inclusive_end + + * **Description**: Specifies whether the specified end key should be + included in the result + * **Optional**: yes + * **Type**: boolean + * **Default**: true + + * **Argument**: key + + * **Description**: Return only documents that match the specified key + * **Optional**: yes + * **Type**: string + + * **Argument**: limit + + * **Description**: Limit the number of the returned documents to the + specified number + * **Optional**: yes + * **Type**: numeric + + * **Argument**: reduce + + * **Description**: Use the reduction function + * **Optional**: yes + * **Type**: boolean + * **Default**: true + + * **Argument**: skip + + * 
**Description**: Skip this number of records before starting to return + the results + * **Optional**: yes + * **Type**: numeric + * **Default**: 0 + + * **Argument**: stale + + * **Description**: Allow the results from a stale view to be used + * **Optional**: yes + * **Type**: string + * **Default**: + * **Supported Values** + + * **ok**: Allow stale views + + * **Argument**: startkey + + * **Description**: Return records starting with the specified key + * **Optional**: yes + * **Type**: string + + * **Argument**: startkey_docid + + * **Description**: Return records starting with the specified document ID + * **Optional**: yes + * **Type**: string + + * **Argument**: update_seq + + * **Description**: Include the update sequence in the generated results + * **Optional**: yes + * **Type**: boolean + * **Default**: false + +Executes the specified ``view-name`` from the specified ``design-doc`` +design document. + +Querying Views and Indexes +-------------------------- + +The definition of a view within a design document also creates an index +based on the key information defined within each view. The production +and use of the index significantly increases the speed of access and +searching or selecting documents from the view. + +However, the index is not updated when new documents are added or +modified in the database. Instead, the index is generated or updated, +either when the view is first accessed, or when the view is accessed +after a document has been updated. In each case, the index is updated +before the view query is executed against the database. + +View indexes are updated incrementally in the following situations: + +- A new document has been added to the database. + +- A document has been deleted from the database. + +- A document in the database has been updated. + +View indexes are rebuilt entirely when the view definition changes. To +achieve this, a 'fingerprint' of the view definition is created when the +design document is updated. 
If the fingerprint changes, then the view +indexes are entirely rebuilt. This ensures that changes to the view +definitions are reflected in the view indexes. + +.. note:: + View index rebuilds occur when one view from the same view group + (i.e. all the views defined within a single design document) has + been determined as needing a rebuild. For example, if you have a + design document with three views, and you update the database, + all three view indexes within the design document will be updated. + +Because the view is updated when it has been queried, it can result in a +delay in returned information when the view is accessed, especially if +there are a large number of documents in the database and the view index +does not exist. There are a number of ways to mitigate, but not +completely eliminate, these issues. These include: + +- Create the view definition (and associated design documents) on your + database before allowing insertion or updates to the documents. If + this is allowed while the view is being accessed, the index can be + updated incrementally. + +- Manually force a view request from the database. You can do this + either before users are allowed to use the view, or you can access + the view manually after documents are added or updated. + +- Use the ``/db/_changes`` method to monitor for changes to the + database and then access the view to force the corresponding view + index to be updated. See :ref:`api-changes` for more information. + +- Use a monitor with the ``update_notification`` section of the CouchDB + configuration file to monitor for changes to your database, and + trigger a view query to force the view to be updated. For more + information, see :ref:`update-notifications`. + +None of these can completely eliminate the need for the indexes to be +rebuilt or updated when the view is accessed, but they may lessen the +effects on end-users of the index update affecting the user experience. 
+ +Another alternative is to allow users to access a 'stale' version of the +view index, rather than forcing the index to be updated and displaying +the updated results. Using a stale view may not return the latest +information, but will return the results of the view query using an +existing version of the index. + +For example, to access the existing stale view ``by_recipe`` in the +``recipes`` design document: + +.. code-block:: text + + http://couchdb:5984/recipes/_design/recipes/_view/by_recipe?stale=ok + +Accessing a stale view: + +- Does not trigger a rebuild of the view indexes, even if there have + been changes since the last access. + +- Returns the current version of the view index, if a current version + exists. + +- Returns an empty result set if the given view index does not exist. + +As an alternative, you can use the ``update_after`` value to the ``stale`` +parameter. This causes the view to be returned as a stale view, but for +the update process to be triggered after the view information has been +returned to the client. + +In addition to using stale views, you can also make use of the +``update_seq`` query argument. Using this query argument generates the +view information including the update sequence of the database from +which the view was generated. The returned value can then be compared to +the current update sequence exposed in the database information +(returned by :ref:`api-get-db`). + +Sorting Returned Rows +--------------------- + +Each element within the returned array is sorted using native UTF-8 +sorting according to the contents of the key portion of the emitted +content. 
The basic order of output is as follows: + +- ``null`` + +- ``false`` + +- ``true`` + +- Numbers + +- Text (case sensitive, lowercase first) + +- Arrays (according to the values of each element, in order) + +- Objects (according to the values of keys, in key order) + +You can reverse the order of the returned view information by using the +``descending`` query value set to true. For example, Retrieving the list +of recipes using the ``by_title`` (limited to 5 records) view: + +.. code-block:: javascript + + { + "offset" : 0, + "rows" : [ + { + "id" : "3-tiersalmonspinachandavocadoterrine", + "key" : "3-tier salmon, spinach and avocado terrine", + "value" : [ + null, + "3-tier salmon, spinach and avocado terrine" + ] + }, + { + "id" : "Aberffrawcake", + "key" : "Aberffraw cake", + "value" : [ + null, + "Aberffraw cake" + ] + }, + { + "id" : "Adukiandorangecasserole-microwave", + "key" : "Aduki and orange casserole - microwave", + "value" : [ + null, + "Aduki and orange casserole - microwave" + ] + }, + { + "id" : "Aioli-garlicmayonnaise", + "key" : "Aioli - garlic mayonnaise", + "value" : [ + null, + "Aioli - garlic mayonnaise" + ] + }, + { + "id" : "Alabamapeanutchicken", + "key" : "Alabama peanut chicken", + "value" : [ + null, + "Alabama peanut chicken" + ] + } + ], + "total_rows" : 2667 + } + +Requesting the same in descending order will reverse the entire view +content. For example the request + +.. code-block:: http + + GET http://couchdb:5984/recipes/_design/recipes/_view/by_title?limit=5&descending=true + Accept: application/json + Content-Type: application/json + +Returns the last 5 records from the view: + +.. 
code-block:: javascript + + { + "offset" : 0, + "rows" : [ + { + "id" : "Zucchiniinagrodolcesweet-sourcourgettes", + "key" : "Zucchini in agrodolce (sweet-sour courgettes)", + "value" : [ + null, + "Zucchini in agrodolce (sweet-sour courgettes)" + ] + }, + { + "id" : "Zingylemontart", + "key" : "Zingy lemon tart", + "value" : [ + null, + "Zingy lemon tart" + ] + }, + { + "id" : "Zestyseafoodavocado", + "key" : "Zesty seafood avocado", + "value" : [ + null, + "Zesty seafood avocado" + ] + }, + { + "id" : "Zabaglione", + "key" : "Zabaglione", + "value" : [ + null, + "Zabaglione" + ] + }, + { + "id" : "Yogurtraita", + "key" : "Yogurt raita", + "value" : [ + null, + "Yogurt raita" + ] + } + ], + "total_rows" : 2667 + } + +The sorting direction is applied before the filtering applied using the +``startkey`` and ``endkey`` query arguments. For example the following +query: + +.. code-block:: http + + GET http://couchdb:5984/recipes/_design/recipes/_view/by_ingredient?startkey=%22carrots%22&endkey=%22egg%22 + Accept: application/json + Content-Type: application/json + +Will operate correctly when listing all the matching entries between +“carrots” and ``egg``. If the order of output is reversed with the +``descending`` query argument, the view request will return no entries: + +.. code-block:: http + + GET http://couchdb:5984/recipes/_design/recipes/_view/by_ingredient?descending=true&startkey=%22carrots%22&endkey=%22egg%22 + Accept: application/json + Content-Type: application/json + +The returned result is empty: + +.. code-block:: javascript + + { + "total_rows" : 26453, + "rows" : [], + "offset" : 21882 + } + +The results will be empty because the entries in the view are reversed +before the key filter is applied, and therefore the ``endkey`` of “egg” +will be seen before the ``startkey`` of “carrots”, resulting in an empty +list. 
+ +Instead, you should reverse the values supplied to the ``startkey`` and +``endkey`` parameters to match the descending sorting applied to the +keys. Changing the previous example to: + +.. code-block:: http + + GET http://couchdb:5984/recipes/_design/recipes/_view/by_ingredient?descending=true&startkey=%22egg%22&endkey=%22carrots%22 + Accept: application/json + Content-Type: application/json + +Specifying Start and End Values +------------------------------- + +.. todo:: Specifying Start and End Values + +The ``startkey`` and ``endkey`` query arguments can be used to specify +the range of values to be displayed when querying the view. + +Using Limits and Skipping Rows +------------------------------ + +.. todo:: Using Limits and Skipping Rows + +TBC + +View Reduction and Grouping +--------------------------- + +.. todo:: View Reduction and Grouping + +TBC + +``POST /db/_design/design-doc/_view/view-name`` +=============================================== + +* **Method**: ``POST /db/_design/design-doc/_view/view-name`` +* **Request**: List of keys to be returned from specified view +* **Response**: JSON of the documents returned by the view +* **Admin Privileges Required**: no +* **Query Arguments**: + + * **Argument**: descending + + * **Description**: Return the documents in descending by key order + * **Optional**: yes + * **Type**: boolean + * **Default**: false + + * **Argument**: endkey + + * **Description**: Stop returning records when the specified key is reached + * **Optional**: yes + * **Type**: string + + * **Argument**: endkey_docid + + * **Description**: Stop returning records when the specified document ID + is reached + * **Optional**: yes + * **Type**: string + + * **Argument**: group + + * **Description**: Group the results using the reduce function to a group + or single row + * **Optional**: yes + * **Type**: boolean + * **Default**: false + + * **Argument**: group_level + + * **Description**: Specify the group level to be used + * 
**Optional**: yes + * **Type**: numeric + + * **Argument**: include_docs + + * **Description**: Include the full content of the documents in the return + * **Optional**: yes + * **Type**: boolean + * **Default**: false + + * **Argument**: inclusive_end + + * **Description**: Specifies whether the specified end key should be + included in the result + * **Optional**: yes + * **Type**: boolean + * **Default**: true + + * **Argument**: key + + * **Description**: Return only documents that match the specified key + * **Optional**: yes + * **Type**: string + + * **Argument**: limit + + * **Description**: Limit the number of the returned documents to the + specified number + * **Optional**: yes + * **Type**: numeric + + * **Argument**: reduce + + * **Description**: Use the reduction function + * **Optional**: yes + * **Type**: boolean + * **Default**: true + + * **Argument**: skip + + * **Description**: Skip this number of records before starting to return + the results + * **Optional**: yes + * **Type**: numeric + * **Default**: 0 + + * **Argument**: stale + + * **Description**: Allow the results from a stale view to be used + * **Optional**: yes + * **Type**: string + * **Default**: + * **Supported Values**: + + * **ok**: Allow stale views + + * **Argument**: startkey + + * **Description**: Return records starting with the specified key + * **Optional**: yes + * **Type**: string + + * **Argument**: startkey_docid + + * **Description**: Return records starting with the specified document ID + * **Optional**: yes + * **Type**: string + + * **Argument**: update_seq + + * **Description**: Include the update sequence in the generated results + * **Optional**: yes + * **Type**: boolean + * **Default**: false + +Executes the specified ``view-name`` from the specified ``design-doc`` +design document. Unlike the ``GET`` method for accessing views, the +``POST`` method supports the specification of explicit keys to be +retrieved from the view results. 
The remainder of the ``POST`` view +functionality is identical to the :ref:`api-get-view` API. + +For example, the request below will return all the recipes where the key +for the view matches either “claret” or “clear apple cider” : + +.. code-block:: http + + POST http://couchdb:5984/recipes/_design/recipes/_view/by_ingredient + Content-Type: application/json + + { + "keys" : [ + "claret", + "clear apple juice" + ] + } + + +The returned view data contains the standard view information, but only +where the keys match. + +.. code-block:: javascript + + { + "total_rows" : 26484, + "rows" : [ + { + "value" : [ + "Scotch collops" + ], + "id" : "Scotchcollops", + "key" : "claret" + }, + { + "value" : [ + "Stand pie" + ], + "id" : "Standpie", + "key" : "clear apple juice" + } + ], + "offset" : 6324 + } + +Multi-document Fetching +----------------------- + +By combining the ``POST`` method to a given view with the +``include_docs=true`` query argument you can obtain multiple documents +from a database. The result is more efficient than using multiple +:ref:`api-get-doc` requests. + +For example, sending the following request for ingredients matching +“claret” and “clear apple juice”: + +.. code-block:: http + + POST http://couchdb:5984/recipes/_design/recipes/_view/by_ingredient?include_docs=true + Content-Type: application/json + + { + "keys" : [ + "claret", + "clear apple juice" + ] + } + +Returns the full document for each recipe: + +.. code-block:: javascript + + { + "offset" : 6324, + "rows" : [ + { + "doc" : { + "_id" : "Scotchcollops", + "_rev" : "1-bcbdf724f8544c89697a1cbc4b9f0178", + "cooktime" : "8", + "ingredients" : [ + { + "ingredient" : "onion", + "ingredtext" : "onion, peeled and chopped", + "meastext" : "1" + }, + ... 
+ ], + "keywords" : [ + "cook method.hob, oven, grill@hob", + "diet@wheat-free", + "diet@peanut-free", + "special collections@classic recipe", + "cuisine@british traditional", + "diet@corn-free", + "diet@citrus-free", + "special collections@very easy", + "diet@shellfish-free", + "main ingredient@meat", + "occasion@christmas", + "meal type@main", + "diet@egg-free", + "diet@gluten-free" + ], + "preptime" : "10", + "servings" : "4", + "subtitle" : "This recipe comes from an old recipe book of 1683 called 'The Gentlewoman's Kitchen'. This is an excellent way of making a rich and full-flavoured meat dish in a very short time.", + "title" : "Scotch collops", + "totaltime" : "18" + }, + "id" : "Scotchcollops", + "key" : "claret", + "value" : [ + "Scotch collops" + ] + }, + { + "doc" : { + "_id" : "Standpie", + "_rev" : "1-bff6edf3ca2474a243023f2dad432a5a", + "cooktime" : "92", + "ingredients" : [ + ... ], + "keywords" : [ + "diet@dairy-free", + "diet@peanut-free", + "special collections@classic recipe", + "cuisine@british traditional", + "diet@corn-free", + "diet@citrus-free", + "occasion@buffet party", + "diet@shellfish-free", + "occasion@picnic", + "special collections@lunchbox", + "main ingredient@meat", + "convenience@serve with salad for complete meal", + "meal type@main", + "cook method.hob, oven, grill@hob / oven", + "diet@cow dairy-free" + ], + "preptime" : "30", + "servings" : "6", + "subtitle" : "Serve this pie with pickled vegetables and potato salad.", + "title" : "Stand pie", + "totaltime" : "437" + }, + "id" : "Standpie", + "key" : "clear apple juice", + "value" : [ + "Stand pie" + ] + } + ], + "total_rows" : 26484 + } + +``GET /db/_design/design-doc/_show/show-name`` +=============================================== + +.. 
todo:: GET /db/_design/design-doc/_show/show-name + +* **Method**: ``GET /db/_design/design-doc/_show/show-name`` +* **Request**: None +* **Response**: Returns the result of the show +* **Admin Privileges Required**: no +* **Query Arguments**: + + * **Argument**: details + + * **Description**: Indicates whether details should be included + * **Optional**: yes + * **Type**: string + + * **Argument**: format + + * **Description**: Format of the returned information + * **Optional**: yes + * **Type**: string + +``POST /db/_design/design-doc/_show/show-name/doc`` +=================================================== + +.. todo:: POST /db/_design/design-doc/_show/show-name/doc + +* **Method**: ``POST /db/_design/design-doc/_show/show-name`` +* **Request**: Custom data +* **Response**: Returns the result of the show +* **Admin Privileges Required**: no + +``GET /db/_design/design-doc/_list/list-name/other-design-doc/view-name`` +========================================================================= + +.. todo:: GET /db/_design/design-doc/_list/list-name/other-design-doc/view-name + +* **Method**: ``GET /db/_design/design-doc/_list/list-name/other-design-doc/view-name`` +* **Request**: TBC +* **Response**: TBC +* **Admin Privileges Required**: no + +``POST /db/_design/design-doc/_list/list-name/other-design-doc/view-name`` +========================================================================== + +.. todo:: POST /db/_design/design-doc/_list/list-name/other-design-doc/view-name + +* **Method**: ``POST /db/_design/design-doc/_list/list-name/other-design-doc/view-name`` +* **Request**: TBC +* **Response**: TBC +* **Admin Privileges Required**: no + +``GET /db/_design/design-doc/_list/list-name/view-name`` +======================================================== + +.. 
todo:: GET /db/_design/design-doc/_list/list-name/view-name + +* **Method**: ``GET /db/_design/design-doc/_list/list-name/view-name`` +* **Request**: TBC +* **Response**: TBC +* **Admin Privileges Required**: no + +``POST /db/_design/design-doc/_list/list-name/view-name`` +========================================================= + +.. todo:: POST /db/_design/design-doc/_list/list-name/view-name + +* **Method**: ``POST /db/_design/design-doc/_list/list-name/view-name`` +* **Request**: TBC +* **Response**: TBC +* **Admin Privileges Required**: no + +``PUT /db/_design/design-doc/_update/updatename/doc`` +===================================================== + +.. todo:: POST /db/_design/design-doc/_update/updatename/doc + +* **Method**: ``POST /db/_design/design-doc/_update/updatename/doc`` +* **Request**: TBC +* **Response**: TBC +* **Admin Privileges Required**: no + +``POST /db/_design/design-doc/_update/updatename`` +================================================== + +.. todo:: PUT /db/_design/design-doc/_update/updatename/doc + +* **Method**: ``PUT /db/_design/design-doc/_update/updatename/doc`` +* **Request**: TBC +* **Response**: TBC +* **Admin Privileges Required**: no + +``ALL /db/_design/design-doc/_rewrite/rewrite-name/anything`` +============================================================= + +.. todo:: ALL /db/_design/design-doc/_rewrite/rewrite-name/anything + +* **Method**: ``ALL /db/_design/design-doc/_rewrite/rewrite-name/anything`` +* **Request**: TBC +* **Response**: TBC +* **Admin Privileges Required**: no diff -Nru couchdb-1.2.0/share/doc/build/html/_sources/api/documents.txt couchdb-1.4.0~rc.1/share/doc/build/html/_sources/api/documents.txt --- couchdb-1.2.0/share/doc/build/html/_sources/api/documents.txt 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/_sources/api/documents.txt 2013-08-23 10:57:21.000000000 -0400 @@ -0,0 +1,973 @@ +.. 
Licensed under the Apache License, Version 2.0 (the "License"); you may not +.. use this file except in compliance with the License. You may obtain a copy of +.. the License at +.. +.. http://www.apache.org/licenses/LICENSE-2.0 +.. +.. Unless required by applicable law or agreed to in writing, software +.. distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +.. WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +.. License for the specific language governing permissions and limitations under +.. the License. + +.. _api-doc: + +================ +Document Methods +================ + +The CouchDB API Server Document methods detail how to create, read, +update and delete documents within a database. + +A list of the available methods and URL paths are provided below: + ++--------+-------------------------+-------------------------------------------+ +| Method | Path | Description | ++========+=========================+===========================================+ +| POST | /db | Create a new document | ++--------+-------------------------+-------------------------------------------+ +| GET | /db/doc | Returns the latest revision of the | +| | | document | ++--------+-------------------------+-------------------------------------------+ +| HEAD | /db/doc | Returns bare information in the HTTP | +| | | Headers for the document | ++--------+-------------------------+-------------------------------------------+ +| PUT | /db/doc | Inserts a new document, or new version | +| | | of an existing document | ++--------+-------------------------+-------------------------------------------+ +| DELETE | /db/doc | Deletes the document | ++--------+-------------------------+-------------------------------------------+ +| COPY | /db/doc | Copies the document | ++--------+-------------------------+-------------------------------------------+ +| GET | /db/doc/attachment | Gets the attachment of a document | 
++--------+-------------------------+-------------------------------------------+ +| PUT | /db/doc/attachment | Adds an attachment to a document | ++--------+-------------------------+-------------------------------------------+ +| DELETE | /db/doc/attachment | Deletes an attachment of a document | ++--------+-------------------------+-------------------------------------------+ + +``POST /db`` +============ + +* **Method**: ``POST /db`` +* **Request**: JSON of the new document +* **Response**: JSON with the committed document information +* **Admin Privileges Required**: no +* **Query Arguments**: + + * **Argument**: batch + + * **Description**: Allow document store request to be batched with others + * **Optional**: yes + * **Type**: string + * **Supported Values**: + * **ok**: Enable + +* **Return Codes**: + + * **201**: + Document has been created successfully + * **409**: + Conflict - a document with the specified document ID already exists + +Create a new document in the specified database, using the supplied JSON +document structure. If the JSON structure includes the ``_id`` field, +then the document will be created with the specified document ID. If the +``_id`` field is not specified, a new unique ID will be generated. + +For example, you can generate a new document with a generated UUID using +the following request: + +.. code-block:: http + + POST http://couchdb:5984/recipes/ + Content-Type: application/json + + { + "servings" : 4, + "subtitle" : "Delicious with fresh bread", + "title" : "Fish Stew" + } + +The returned JSON will specify the automatically generated ID and revision +information: + +.. code-block:: javascript + + { + "id" : "64575eef70ab90a2b8d55fc09e00440d", + "ok" : true, + "rev" : "1-9c65296036141e575d32ba9c034dd3ee" + } + +Specifying the Document ID +-------------------------- + +The document ID can be specified by including the ``_id`` field in the +JSON of the submitted record. 
The following request will create the same +document with the ID ``FishStew``: + +.. code-block:: http + + POST http://couchdb:5984/recipes/ + Content-Type: application/json + + { + "_id" : "FishStew", + "servings" : 4, + "subtitle" : "Delicious with fresh bread", + "title" : "Fish Stew" + } + +The structure of the submitted document is as shown in the table below: + +In either case, the returned JSON will specify the document ID, revision +ID, and status message: + +.. code-block:: javascript + + { + "id" : "FishStew", + "ok" : true, + "rev" : "1-9c65296036141e575d32ba9c034dd3ee" + } + + +.. _api-batch-writes: + +UUID generation algorithms +-------------------------- + +CouchDB supports a number of different UUID generation algorithms for use +in situations where a user-specified UUID does not make sense. These +can be set simply by `PUT http://couchdb:5984/_config/uuids/algorithm`. + + ++---------------+---------------------+------------------------------------+ +| Algorithm | Description | Sample UUID | ++===============+=====================+====================================+ +| random | 128 bits of pure | - 43febce5675468a5467fb5467ce9e6c0 | +| | random awesomeness | | ++---------------+---------------------+------------------------------------+ +| sequential | monotonically | - f755c413badf66b22941313f9f001e28 | +| | increasing ids with | - f755c413badf66b22941313f9f0024ca | +| | random increments | - f755c413badf66b22941313f9f00332c | ++---------------+---------------------+------------------------------------+ +| utc_random | time since start of | - 04cfa405381205204f75100d0241ccc3 | +| | epoch, as 14 hex | - 04cfa4059c48e76e7c054bbe033dd8db | +| | digits, followed by | - 04cfa405fce10b0df4c08f95e667cd2f | +| | 18 random digits. 
| | ++---------------+---------------------+------------------------------------+ +| utc_id | time since start of | - 04cfa718b00848_i_am_in_yer_couch | +| & additional | epoch, as 14 hex | - 04cfa71d377aef_i_am_in_yer_couch | +| parameter | digits, followed by | - 04cfa71e0deabd_i_am_in_yer_couch | +| | utc_id_suffix. | | ++---------------+---------------------+------------------------------------+ + +.. Impact of UUID choices:: + The choice of UUID has a significant impact on the layout of the B-tree, + prior to compaction. For example, a sequential UUID algorithm during + uploading thousands of documents, will avoid the need to rewrite many + intermediate B-tree nodes. A random UUID algorithm may require rewriting + intermediate nodes on a regular basis, with a corresponding decrease of + throughput, and significant wasted space due to the append-only B-tree + design. It is generally recommended to set your own UUIDs, or use the + sequential algorithm unless you have a specific need and take into account + the likely need for compaction to re-balance the B-tree and reclaim wasted + space. + + +Batch Mode Writes +----------------- + +You can write documents to the database at a higher rate by using the +batch option. This collects document writes together in memory (on a +user-by-user basis) before they are committed to disk. This increases +the risk of the documents not being stored in the event of a failure, +since the documents are not written to disk immediately. + +To use the batched mode, append the ``batch=ok`` query argument to the +URL of the ``PUT`` or ``POST`` request. The CouchDB server will respond +with a 202 HTTP response code immediately. + +Including Attachments +--------------------- + +You can include one or more attachments with a given document by +incorporating the attachment information within the JSON of the +document. 
This provides a simpler alternative to loading documents with +attachments than making a separate call (see :ref:`api-put-attachment`). + +* **_id** (optional): Document ID +* **_rev** (optional): Revision ID (when updating an existing document) +* **_attachments** (optional): Document Attachment + + * **filename**: Attachment information + + * **content_type**: MIME Content type string + * **data**: File attachment content, Base64 encoded + +The ``filename`` will be the attachment name. For example, when sending +the JSON structure below: + +.. code-block:: javascript + + { + "_id" : "FishStew", + "servings" : 4, + "subtitle" : "Delicious with fresh bread", + "title" : "Fish Stew" + "_attachments" : { + "styling.css" : { + "content-type" : "text/css", + "data" : "cCB7IGZvbnQtc2l6ZTogMTJwdDsgfQo=", + }, + }, + } + + +The attachment ``styling.css`` can be accessed using +``/recipes/FishStew/styling.css``. For more information on attachments, +see :ref:`api-get-attachment`. + +The document data embedded in to the structure must be encoded using +base64. + +.. _api-get-doc: + +``GET /db/doc`` +=============== + +* **Method**: ``GET /db/doc`` +* **Request**: None +* **Response**: Returns the JSON for the document +* **Admin Privileges Required**: no +* **Query Arguments**: + + * **Argument**: conflicts + + * **Description**: Returns the conflict tree for the document. 
+ * **Optional**: yes + * **Type**: boolean + * **Default**: false + * **Supported Values**: + + * **true**: Includes the revisions + + * **Argument**: rev + + * **Description**: Specify the revision to return + * **Optional**: yes + * **Type**: string + * **Supported Values**: + + * **true**: Includes the revisions + + * **Argument**: revs + + * **Description**: Return a list of the revisions for the document + * **Optional**: yes + * **Type**: boolean + + * **Argument**: revs_info + + * **Description**: Return a list of detailed revision information for the + document + * **Optional**: yes + * **Type**: boolean + * **Supported Values**: + + * **true**: Includes the revisions + +* **Return Codes**: + + * **200**: + Document retrieved + * **400**: + The format of the request or revision was invalid + * **404**: + The specified document or revision cannot be found, or has been deleted + * **409**: + Conflict - a document with the specified document ID already exists + +Returns the specified ``doc`` from the specified ``db``. For example, to +retrieve the document with the id ``FishStew`` you would send the +following request: + +.. code-block:: http + + GET http://couchdb:5984/recipes/FishStew + Content-Type: application/json + Accept: application/json + +The returned JSON is the JSON of the document, including the document ID +and revision number: + +.. code-block:: javascript + + { + "_id" : "FishStew", + "_rev" : "3-a1a9b39ee3cc39181b796a69cb48521c", + "servings" : 4, + "subtitle" : "Delicious with a green salad", + "title" : "Irish Fish Stew" + } + + +Unless you request a specific revision, the latest revision of the +document will always be returned. + +Attachments +----------- + +If the document includes attachments, then the returned structure will +contain a summary of the attachments associated with the document, but +not the attachment data itself. 
+ +The JSON for the returned document will include the ``_attachments`` +field, with one or more attachment definitions. For example: + +.. code-block:: javascript + + { + "_id" : "FishStew", + "servings" : 4, + "subtitle" : "Delicious with fresh bread", + "title" : "Fish Stew" + "_attachments" : { + "styling.css" : { + "stub" : true, + "content-type" : "text/css", + "length" : 783426, + }, + }, + } + +The format of the returned JSON is shown in the table below: + +* **_id** (optional): Document ID +* **_rev** (optional): Revision ID (when updating an existing document) +* **_attachments** (optional): Document Attachment + + * **filename**: Attachment information + + * **content_type**: MIME Content type string + * **length**: Length (bytes) of the attachment data + * **revpos**: Revision where this attachment exists + * **stub**: Indicates whether the attachment is a stub + +Getting a List of Revisions +--------------------------- + +You can obtain a list of the revisions for a given document by adding +the ``revs=true`` parameter to the request URL. For example: + +.. code-block:: http + + GET http://couchdb:5984/recipes/FishStew?revs=true + Accept: application/json + +The returned JSON structure includes the original document, including a +``_revisions`` structure that includes the revision information: + +.. 
code-block:: javascript + + { + "servings" : 4, + "subtitle" : "Delicious with a green salad", + "_id" : "FishStew", + "title" : "Irish Fish Stew", + "_revisions" : { + "ids" : [ + "a1a9b39ee3cc39181b796a69cb48521c", + "7c4740b4dcf26683e941d6641c00c39d", + "9c65296036141e575d32ba9c034dd3ee" + ], + "start" : 3 + }, + "_rev" : "3-a1a9b39ee3cc39181b796a69cb48521c" + } + +* **_id** (optional): Document ID +* **_rev** (optional): Revision ID (when updating an existing document) +* **_revisions**: CouchDB Document Revisions + + * **ids** [array]: Array of valid revision IDs, in reverse order + (latest first) + * **start**: Prefix number for the latest revision + +Obtaining an Extended Revision History +-------------------------------------- + +You can get additional information about the revisions for a given +document by supplying the ``revs_info`` argument to the query: + +.. code-block:: http + + GET http://couchdb:5984/recipes/FishStew?revs_info=true + Accept: application/json + +This returns extended revision information, including the availability +and status of each revision: + +.. 
code-block:: javascript + + { + "servings" : 4, + "subtitle" : "Delicious with a green salad", + "_id" : "FishStew", + "_revs_info" : [ + { + "status" : "available", + "rev" : "3-a1a9b39ee3cc39181b796a69cb48521c" + }, + { + "status" : "available", + "rev" : "2-7c4740b4dcf26683e941d6641c00c39d" + }, + { + "status" : "available", + "rev" : "1-9c65296036141e575d32ba9c034dd3ee" + } + ], + "title" : "Irish Fish Stew", + "_rev" : "3-a1a9b39ee3cc39181b796a69cb48521c" + } + +* **_id** (optional): Document ID +* **_rev** (optional): Revision ID (when updating an existing document) +* **_revs_info** [array]: CouchDB Document Extended Revision Info + + * **rev**: Full revision string + * **status**: Status of the revision + +Obtaining a Specific Revision +----------------------------- + +To get a specific revision, use the ``rev`` argument to the request, and +specify the full revision number: + +.. code-block:: http + + GET http://couchdb:5984/recipes/FishStew?rev=2-7c4740b4dcf26683e941d6641c00c39d + Accept: application/json + +The specified revision of the document will be returned, including a +``_rev`` field specifying the revision that was requested: + +.. 
code-block:: javascript + + { + "_id" : "FishStew", + "_rev" : "2-7c4740b4dcf26683e941d6641c00c39d", + "servings" : 4, + "subtitle" : "Delicious with a green salad", + "title" : "Fish Stew" + } + +``HEAD /db/doc`` +================ + +* **Method**: ``HEAD /db/doc`` +* **Request**: None +* **Response**: None +* **Admin Privileges Required**: no +* **Query Arguments**: + + * **Argument**: rev + + * **Description**: Specify the revision to return + * **Optional**: yes + * **Type**: string + + * **Argument**: revs + + * **Description**: Return a list of the revisions for the document + * **Optional**: yes + * **Type**: boolean + + * **Argument**: revs_info + + * **Description**: Return a list of detailed revision information for the + document + * **Optional**: yes + * **Type**: boolean + +* **Return Codes**: + + * **404**: + The specified document or revision cannot be found, or has been deleted + +Returns the HTTP Headers containing a minimal amount of information +about the specified document. The method supports the same query +arguments as the ``GET`` method, but only the header information +(including document size, and the revision as an ETag), is returned. For +example, a simple ``HEAD`` request: + +.. code-block:: http + + HEAD http://couchdb:5984/recipes/FishStew + Content-Type: application/json + + +Returns the following HTTP Headers: + +.. code-block:: javascript + + HTTP/1.1 200 OK + Server: CouchDB/1.0.1 (Erlang OTP/R13B) + Etag: "7-a19a1a5ecd946dad70e85233ba039ab2" + Date: Fri, 05 Nov 2010 14:54:43 GMT + Content-Type: text/plain;charset=utf-8 + Content-Length: 136 + Cache-Control: must-revalidate + +The ``Etag`` header shows the current revision for the requested +document, and the ``Content-Length`` specifies the length of the data, +if the document were requested in full. + +Adding any of the query arguments (as supported by ```GET```_ method), +then the resulting HTTP Headers will correspond to what would be +returned. 
Note that the current revision is not returned when the +``refs_info`` argument is used. For example: + +.. code-block:: http + + HTTP/1.1 200 OK + Server: CouchDB/1.0.1 (Erlang OTP/R13B) + Date: Fri, 05 Nov 2010 14:57:16 GMT + Content-Type: text/plain;charset=utf-8 + Content-Length: 609 + Cache-Control: must-revalidate + +.. _api-put-doc: + +``PUT /db/doc`` +=============== + +* **Method**: ``PUT /db/doc`` +* **Request**: JSON of the new document, or updated version of the existed + document +* **Response**: JSON of the document ID and revision +* **Admin Privileges Required**: no +* **Query Arguments**: + + * **Argument**: batch + + * **Description**: Allow document store request to be batched with others + * **Optional**: yes + * **Type**: string + * **Supported Values**: + + * **ok**: Enable + +* **HTTP Headers** + + * **Header**: ``If-Match`` + + * **Description**: Current revision of the document for validation + * **Optional**: yes + +* **Return Codes**: + + * **201**: + Document has been created successfully + * **202**: + Document accepted for writing (batch mode) + + +The ``PUT`` method creates a new named document, or creates a new +revision of the existing document. Unlike the ``POST`` method, you +must specify the document ID in the request URL. + +For example, to create the document ``FishStew``, you would send the +following request: + +.. code-block:: http + + PUT http://couchdb:5984/recipes/FishStew + Content-Type: application/json + + { + "servings" : 4, + "subtitle" : "Delicious with fresh bread", + "title" : "Fish Stew" + } + +The return type is JSON of the status, document ID,and revision number: + +.. code-block:: javascript + + { + "id" : "FishStew", + "ok" : true, + "rev" : "1-9c65296036141e575d32ba9c034dd3ee" + } + +Updating an Existing Document +----------------------------- + +To update an existing document you must specify the current revision +number within the ``_rev`` parameter. For example: + +.. 
code-block:: http + + PUT http://couchdb:5984/recipes/FishStew + Content-Type: application/json + + { + "_rev" : "1-9c65296036141e575d32ba9c034dd3ee", + "servings" : 4, + "subtitle" : "Delicious with fresh salad", + "title" : "Fish Stew" + } + +Alternatively, you can supply the current revision number in the +``If-Match`` HTTP header of the request. For example: + +.. code-block:: http + + PUT http://couchdb:5984/recipes/FishStew + If-Match: 2-d953b18035b76f2a5b1d1d93f25d3aea + Content-Type: application/json + + { + "servings" : 4, + "subtitle" : "Delicious with fresh salad", + "title" : "Fish Stew" + } + +The JSON returned will include the updated revision number: + +.. code-block:: javascript + + { + "id" : "FishStew99", + "ok" : true, + "rev" : "2-d953b18035b76f2a5b1d1d93f25d3aea" + } + +For information on batched writes, which can provide improved +performance, see :ref:`api-batch-writes`. + +.. _api-del-doc: + +``DELETE /db/doc`` +================== + +* **Method**: ``DELETE /db/doc`` +* **Request**: None +* **Response**: JSON of the deleted revision +* **Admin Privileges Required**: no +* **Query Arguments**: + + * **Argument**: rev + + * **Description**: Current revision of the document for validation + * **Optional**: yes + * **Type**: string + +* **HTTP Headers** + + * **Header**: ``If-Match`` + + * **Description**: Current revision of the document for validation + * **Optional**: yes + +* **Return Codes**: + + * **409**: + Revision is missing, invalid or not the latest + +Deletes the specified document from the database. You must supply the +current (latest) revision, either by using the ``rev`` parameter to +specify the revision: + +.. code-block:: http + + DELETE http://couchdb:5984/recipes/FishStew?rev=3-a1a9b39ee3cc39181b796a69cb48521c + Content-Type: application/json + +Alternatively, you can use ETags with the ``If-Match`` field: + +.. 
code-block:: http + + DELETE http://couchdb:5984/recipes/FishStew + If-Match: 3-a1a9b39ee3cc39181b796a69cb48521c + Content-Type: application/json + + +The returned JSON contains the document ID, revision and status: + +.. code-block:: javascript + + { + "id" : "FishStew", + "ok" : true, + "rev" : "4-2719fd41187c60762ff584761b714cfb" + } + +.. note:: Note that deletion of a record increments the revision number. The + use of a revision for deletion of the record allows replication of + the database to correctly track the deletion in synchronized copies. + +.. _api-copy-doc: + +``COPY /db/doc`` +================ + +* **Method**: ``COPY /db/doc`` +* **Request**: None +* **Response**: JSON of the new document and revision +* **Admin Privileges Required**: no +* **Query Arguments**: + + * **Argument**: rev + + * **Description**: Revision to copy from + * **Optional**: yes + * **Type**: string + +* **HTTP Headers** + + * **Header**: ``Destination`` + + * **Description**: Destination document (and optional revision) + * **Optional**: no + +* **Return Codes**: + + * **201**: + Document has been copied and created successfully + * **409**: + Revision is missing, invalid or not the latest + +The ``COPY`` command (which is non-standard HTTP) copies an existing +document to a new or existing document. + +The source document is specified on the request line, with the +``Destination`` HTTP Header of the request specifying the target +document. + +Copying a Document +------------------ + +You can copy the latest version of a document to a new document by +specifying the current document and target document: + +.. code-block:: http + + COPY http://couchdb:5984/recipes/FishStew + Content-Type: application/json + Destination: IrishFishStew + +The above request copies the document ``FishStew`` to the new document +``IrishFishStew``. The response is the ID and revision of the new +document. + +.. 
code-block:: javascript + + { + "id" : "IrishFishStew", + "rev" : "1-9c65296036141e575d32ba9c034dd3ee" + } + +Copying from a Specific Revision +-------------------------------- + +To copy *from* a specific version, use the ``rev`` argument to the query +string: + +.. code-block:: http + + COPY http://couchdb:5984/recipes/FishStew?rev=5-acfd32d233f07cea4b4f37daaacc0082 + Content-Type: application/json + Destination: IrishFishStew + +The new document will be created using the information in the specified +revision of the source document. + +Copying to an Existing Document +------------------------------- + +To copy to an existing document, you must specify the current revision +string for the target document, using the ``rev`` parameter to the +``Destination`` HTTP Header string. For example: + +.. code-block:: http + + COPY http://couchdb:5984/recipes/FishStew + Content-Type: application/json + Destination: IrishFishStew?rev=1-9c65296036141e575d32ba9c034dd3ee + +The return value will be the new revision of the copied document: + +.. code-block:: javascript + + { + "id" : "IrishFishStew", + "rev" : "2-55b6a1b251902a2c249b667dab1c6692" + } + +.. _api-get-attachment: + +``GET /db/doc/attachment`` +========================== + +* **Method**: ``GET /db/doc/attachment`` +* **Request**: None +* **Response**: Returns the attachment data +* **Admin Privileges Required**: no + +Returns the file attachment ``attachment`` associated with the document +``doc``. The raw data of the associated attachment is returned (just as +if you were accessing a static file. The returned HTTP ``Content-type`` +will be the same as the content type set when the document attachment +was submitted into the database. + +.. 
_api-put-attachment: + +``PUT /db/doc/attachment`` +========================== + +* **Method**: ``PUT /db/doc/attachment`` +* **Request**: Raw document data +* **Response**: JSON document status +* **Admin Privileges Required**: no +* **Query Arguments**: + + * **Argument**: rev + + * **Description**: Current document revision + * **Optional**: no + * **Type**: string + +* **HTTP Headers** + + * **Header**: ``Content-Length`` + + * **Description**: Length (bytes) of the attachment being uploaded + * **Optional**: no + + * **Header**: ``Content-Type`` + + * **Description**: MIME type for the uploaded attachment + * **Optional**: no + + * **Header**: ``If-Match`` + + * **Description**: Current revision of the document for validation + * **Optional**: yes + +* **Return Codes**: + + * **201**: + Attachment has been accepted + +Upload the supplied content as an attachment to the specified document +(``doc``). The ``attachment`` name provided must be a URL encoded +string. You must also supply either the ``rev`` query argument or the +``If-Match`` HTTP header for validation, and the HTTP headers (to set +the attachment content type). The content type is used when the +attachment is requested as the corresponding content-type in the +returned document header. + +For example, you could upload a simple text document using the following +request: + +.. code-block:: http + + PUT http://couchdb:5984/recipes/FishStew/basic?rev=8-a94cb7e50ded1e06f943be5bfbddf8ca + Content-Length: 10 + Content-Type: text/plain + + Roast it + +Or by using the ``If-Match`` HTTP header: + +.. code-block:: http + + PUT http://couchdb:5984/recipes/FishStew/basic + If-Match: 8-a94cb7e50ded1e06f943be5bfbddf8ca + Content-Length: 10 + Content-Type: text/plain + + Roast it + +The returned JSON contains the new document information: + +.. code-block:: javascript + + { + "id" : "FishStew", + "ok" : true, + "rev" : "9-247bb19a41bfd9bfdaf5ee6e2e05be74" + } + +.. 
note:: Uploading an attachment updates the corresponding document revision. + Revisions are tracked for the parent document, not individual + attachments. + +Updating an Existing Attachment +------------------------------- + +Uploading an attachment using an existing attachment name will update +the corresponding stored content of the database. Since you must supply +the revision information to add an attachment to a document, this serves +as validation to update the existing attachment. + +``DELETE /db/doc/attachment`` +============================= + +* **Method**: ``DELETE /db/doc/attachment`` +* **Request**: None +* **Response**: JSON status +* **Admin Privileges Required**: no +* **Query Arguments**: + + * **Argument**: rev + + * **Description**: Current document revision + * **Optional**: no + * **Type**: string + +* **HTTP Headers** + + * **Header**: ``If-Match`` + + * **Description**: Current revision of the document for validation + * **Optional**: yes + +* **Return Codes**: + + * **200**: + Attachment deleted successfully + * **409**: + Supplied revision is incorrect or missing + +Deletes the attachment ``attachment`` to the specified ``doc``. You must +supply the ``rev`` argument with the current revision to delete the +attachment. + +For example to delete the attachment ``basic`` from the recipe +``FishStew``: + +.. code-block:: http + + DELETE http://couchdb:5984/recipes/FishStew/basic?rev=9-247bb19a41bfd9bfdaf5ee6e2e05be74 + Content-Type: application/json + + + +The returned JSON contains the updated revision information: + +.. code-block:: javascript + + { + "id" : "FishStew", + "ok" : true, + "rev" : "10-561bf6b1e27615cee83d1f48fa65dd3e" + } + +.. _JSON object: #table-couchdb-api-db_db-json-changes +.. 
_POST: #couchdb-api-dbdoc_db_post diff -Nru couchdb-1.2.0/share/doc/build/html/_sources/api/local.txt couchdb-1.4.0~rc.1/share/doc/build/html/_sources/api/local.txt --- couchdb-1.2.0/share/doc/build/html/_sources/api/local.txt 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/_sources/api/local.txt 2013-08-23 10:57:21.000000000 -0400 @@ -0,0 +1,169 @@ +.. Licensed under the Apache License, Version 2.0 (the "License"); you may not +.. use this file except in compliance with the License. You may obtain a copy of +.. the License at +.. +.. http://www.apache.org/licenses/LICENSE-2.0 +.. +.. Unless required by applicable law or agreed to in writing, software +.. distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +.. WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +.. License for the specific language governing permissions and limitations under +.. the License. + +.. _api-local: + +======================================== +Local (non-replicating) Document Methods +======================================== + +The Local (non-replicating) document interface allows you to create +local documents that are not replicated to other databases. These +documents can be used to hold configuration or other information that is +required specifically on the local CouchDB instance. + +Local documents have the following limitations: + +- Local documents are not replicated to other databases. + +- The ID of the local document must be known for the document to + accessed. You cannot obtain a list of local documents from the + database. + +- Local documents are not output by views, or the ``_all_docs`` view. + +Local documents can be used when you want to store configuration or +other information for the current (local) instance of a given database. 
+ +A list of the available methods and URL paths are provided below: + ++--------+-------------------------+-------------------------------------------+ +| Method | Path | Description | ++========+=========================+===========================================+ +| GET | /db/_local/local-doc | Returns the latest revision of the | +| | | non-replicated document | ++--------+-------------------------+-------------------------------------------+ +| PUT | /db/_local/local-doc | Inserts a new version of the | +| | | non-replicated document | ++--------+-------------------------+-------------------------------------------+ +| DELETE | /db/_local/local-doc | Deletes the non-replicated document | ++--------+-------------------------+-------------------------------------------+ +| COPY | /db/_local/local-doc | Copies the non-replicated document | ++--------+-------------------------+-------------------------------------------+ + +``GET /db/_local/local-doc`` +============================ + +* **Method**: ``GET /db/_local/local-doc`` +* **Request**: None +* **Response**: JSON of the returned document +* **Admin Privileges Required**: no +* **Query Arguments**: + + * **Argument**: rev + + * **Description**: Specify the revision to return + * **Optional**: yes + * **Type**: string + * **Supported Values**: + + * **true**: Includes the revisions + + * **Argument**: revs + + * **Description**: Return a list of the revisions for the document + * **Optional**: yes + * **Type**: boolean + + * **Argument**: revs_info + + * **Description**: Return a list of detailed revision information for + the document + * **Optional**: yes + * **Type**: boolean + * **Supported Values** + + * **true**: Includes the revisions + +* **Return Codes**: + + * **400**: + The format of the request or revision was invalid + * **404**: + The specified document or revision cannot be found, or has been deleted + +Gets the specified local document. 
The semantics are identical to +accessing a standard document in the specified database, except that the +document is not replicated. See :ref:`api-get-doc`. + +``PUT /db/_local/local-doc`` +============================ + +* **Method**: ``PUT /db/_local/local-doc`` +* **Request**: JSON of the document +* **Response**: JSON with the committed document information +* **Admin Privileges Required**: no +* **Return Codes**: + + * **201**: + Document has been created successfully + +Stores the specified local document. The semantics are identical to +storing a standard document in the specified database, except that the +document is not replicated. See :ref:`api-put-doc`. + +``DELETE /db/_local/local-doc`` +=============================== + +* **Method**: ``DELETE /db/_local/local-doc`` +* **Request**: None +* **Response**: JSON with the deleted document information +* **Admin Privileges Required**: no +* **Query Arguments**: + + * **Argument**: rev + + * **Description**: Current revision of the document for validation + * **Optional**: yes + * **Type**: string + +* **HTTP Headers** + + * **Header**: ``If-Match`` + + * **Description**: Current revision of the document for validation + * **Optional**: yes + +* **Return Codes**: + + * **409**: + Supplied revision is incorrect or missing + +Deletes the specified local document. The semantics are identical to +deleting a standard document in the specified database, except that the +document is not replicated. See :ref:`api-del-doc`. 
+ +``COPY /db/_local/local-doc`` +============================= + +* **Method**: ``COPY /db/_local/local-doc`` +* **Request**: None +* **Response**: JSON of the copied document +* **Admin Privileges Required**: no +* **Query Arguments**: + + * **Argument**: rev + + * **Description**: Revision to copy from + * **Optional**: yes + * **Type**: string + +* **HTTP Headers** + + * **Header**: ``Destination`` + + * **Description**: Destination document (and optional revision) + * **Optional**: no + +Copies the specified local document. The semantics are identical to +copying a standard document in the specified database, except that the +document is not replicated. See :ref:`api-copy-doc`. diff -Nru couchdb-1.2.0/share/doc/build/html/_sources/api/misc.txt couchdb-1.4.0~rc.1/share/doc/build/html/_sources/api/misc.txt --- couchdb-1.2.0/share/doc/build/html/_sources/api/misc.txt 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/_sources/api/misc.txt 2013-08-23 10:57:21.000000000 -0400 @@ -0,0 +1,867 @@ +.. Licensed under the Apache License, Version 2.0 (the "License"); you may not +.. use this file except in compliance with the License. You may obtain a copy of +.. the License at +.. +.. http://www.apache.org/licenses/LICENSE-2.0 +.. +.. Unless required by applicable law or agreed to in writing, software +.. distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +.. WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +.. License for the specific language governing permissions and limitations under +.. the License. + +.. _api-misc: + +===================== +Miscellaneous Methods +===================== + +The CouchDB Miscellaneous interface provides the basic interface to a +CouchDB server for obtaining CouchDB information and getting and setting +configuration information. 
+ +A list of the available methods and URL paths are provided below: + ++--------+-------------------------+-------------------------------------------+ +| Method | Path | Description | ++========+=========================+===========================================+ +| GET | / | Get the welcome message and version | +| | | information | ++--------+-------------------------+-------------------------------------------+ +| GET | /_active_tasks | Obtain a list of the tasks running in the| +| | | server | ++--------+-------------------------+-------------------------------------------+ +| GET | /_all_dbs | Get a list of all the DBs | ++--------+-------------------------+-------------------------------------------+ +| GET | /_db_updates | A feed of database events | ++--------+-------------------------+-------------------------------------------+ +| GET | /_log | Return the server log file | ++--------+-------------------------+-------------------------------------------+ +| POST | /_replicate | Set or cancel replication | ++--------+-------------------------+-------------------------------------------+ +| POST | /_restart | Restart the server | ++--------+-------------------------+-------------------------------------------+ +| GET | /_stats | Return server statistics | ++--------+-------------------------+-------------------------------------------+ +| GET | /_utils | CouchDB administration interface (Futon) | ++--------+-------------------------+-------------------------------------------+ +| GET | /_uuids | Get generated UUIDs from the server | ++--------+-------------------------+-------------------------------------------+ +| GET | /favicon.ico | Get the site icon | ++--------+-------------------------+-------------------------------------------+ + +``GET /`` +========= + +* **Method**: ``GET /`` +* **Request**: None +* **Response**: Welcome message and version +* **Admin Privileges Required**: no +* **Return Codes**: + + * **200**: + Request completed 
successfully. + +Accessing the root of a CouchDB instance returns meta information about +the instance. The response is a JSON structure containing information +about the server, including a welcome message and the version of the +server. + +.. code-block:: javascript + + { + "couchdb" : "Welcome", + "version" : "1.0.1" + } + +.. _active-tasks: + +``GET /_active_tasks`` +====================== + +* **Method**: ``GET /_active_tasks`` +* **Request**: None +* **Response**: List of running tasks, including the task type, name, status + and process ID +* **Admin Privileges Required**: yes +* **Return Codes**: + + * **200**: + Request completed successfully. + +You can obtain a list of active tasks by using the ``/_active_tasks`` +URL. The result is a JSON array of the currently running tasks, with +each task being described with a single object. For example: + +.. code-block:: javascript + + [ + { + "pid" : "<0.11599.0>", + "status" : "Copied 0 of 18369 changes (0%)", + "task" : "recipes", + "type" : "Database Compaction" + } + ] + +The returned structure includes the following fields for each task: + +* **tasks** [array]: Active Task + + * **pid**:Process ID + * **status**: Task status message + * **task**: Task name + * **type**: Operation Type + +For operation type, valid values include: + +- ``Database Compaction`` + +- ``Replication`` + +- ``View Group Compaction`` + +- ``View Group Indexer`` + +``GET /_all_dbs`` +================= + +* **Method**: ``GET /_all_dbs`` +* **Request**: None +* **Response**: JSON list of DBs +* **Admin Privileges Required**: no +* **Return Codes**: + + * **200**: + Request completed successfully. + +Returns a list of all the databases in the CouchDB instance. For +example: + +.. code-block:: http + + GET http://couchdb:5984/_all_dbs + Accept: application/json + +The return is a JSON array: + +.. 
code-block:: javascript + + [ + "_users", + "contacts", + "docs", + "invoices", + "locations" + ] + +``GET /_db_updates`` +==================== + +* **Method**: ``GET /_db_updates`` +* **Request**: None +* **Admin Privileges Required**: yes +* **Query Arguments**: + + * **Argument**: feed + + * **Description**: Format of the response feed + * **Optional**: yes + * **Type**: string + * **Default**: longpoll + * **Supported Values**: + + * **longpoll**: Closes the connection after the first event. + * **continuous**: Send a line of JSON per event. Keeps the socket open until ``timeout``. + * **eventsource**: Like ``continuous``, but sends the events in EventSource format. See http://dev.w3.org/html5/eventsource/ for details. + + * **Argument**: timeout + + * **Description**: Number of seconds until CouchDB closes the connection. + * **Optional**: yes + * **Type**: numeric + * **Default**: 60 + + * **Argument**: heartbeat + + * **Description**: Whether CouchDB will send a newline character (``\n``) on ``timeout``. + * **Optional**: yes + * **Type**: boolean + * **Default**: true + +* **Return Codes**: + + * **200**: + Request completed successfully. + +Returns a list of all database events in the CouchDB instance. + +A database event is one of `created`, `updated`, `deleted`. + +For example: + +.. code-block:: http + + GET http://couchdb:5984/_db_updates?feed=continuous + Accept: application/json + +.. 
code-block:: javascript + + {"dbname":"my-database", "type":"created"} + {"dbname":"my-database", "type":"updated"} + {"dbname":"another-database", "type":"created"} + {"dbname":"my-database", "type":"deleted"} + {"dbname":"another-database", "type":"updated"} + + + +``GET /_log`` +============= + +* **Method**: ``GET /_log`` +* **Request**: None +* **Response**: Log content +* **Admin Privileges Required**: yes +* **Query Arguments**: + + * **Argument**: bytes + + * **Description**: Bytes to be returned + * **Optional**: yes + * **Type**: numeric + * **Default**: 1000 + + * **Argument**: offset + + * **Description**: Offset in bytes where the log tail should be started + * **Optional**: yes + * **Type**: numeric + * **Default**: 0 + +* **Return Codes**: + + * **200**: + Request completed successfully. + +Gets the CouchDB log, equivalent to accessing the local log file of the +corresponding CouchDB instance. + +When you request the log, the response is returned as plain (UTF-8) +text, with an HTTP ``Content-type`` header as ``text/plain``. + +For example, the request: + +.. code-block:: http + + GET http://couchdb:5984/_log + Accept: */* + +The raw text is returned: + +.. code-block:: text + + [Wed, 27 Oct 2010 10:49:42 GMT] [info] [<0.23338.2>] 192.168.0.2 - - 'PUT' /authdb 401 + [Wed, 27 Oct 2010 11:02:19 GMT] [info] [<0.23428.2>] 192.168.0.116 - - 'GET' /recipes/FishStew 200 + [Wed, 27 Oct 2010 11:02:19 GMT] [info] [<0.23428.2>] 192.168.0.116 - - 'GET' /_session 200 + [Wed, 27 Oct 2010 11:02:19 GMT] [info] [<0.24199.2>] 192.168.0.116 - - 'GET' / 200 + [Wed, 27 Oct 2010 13:03:38 GMT] [info] [<0.24207.2>] 192.168.0.116 - - 'GET' /_log?offset=5 200 + +If you want to pick out specific parts of the log information you can +use the ``bytes`` argument, which specifies the number of bytes to be +returned, and ``offset``, which specifies where the reading of the log +should start, counted back from the end. For example, if you use the +following request: + +.. 
code-block:: http + + GET /_log?bytes=500&offset=2000 + +Reading of the log will start at 2000 bytes from the end of the log, and +500 bytes will be shown. + +.. _replicate: + +``POST /_replicate`` +==================== + +.. todo:: POST /_replicate :: what response is? + +* **Method**: ``POST /_replicate`` +* **Request**: Replication specification +* **Response**: TBD +* **Admin Privileges Required**: yes +* **Query Arguments**: + + * **Argument**: bytes + + * **Description**: Bytes to be returned + * **Optional**: yes + * **Type**: numeric + * **Default**: 1000 + + * **Argument**: offset + + * **Description**: Offset in bytes where the log tail should be started + * **Optional**: yes + * **Type**: numeric + * **Default**: 0 + +* **Return Codes**: + + * **200**: + Replication request successfully completed + * **202**: + Continuous replication request has been accepted + * **404**: + Either the source or target DB is not found + * **500**: + JSON specification was invalid + +Request, configure, or stop, a replication operation. + +The specification of the replication request is controlled through the +JSON content of the request. The JSON should be an object with the +fields defining the source, target and other options. 
The fields of the +JSON request are shown in the table below: + +* **cancel (optional)**: Cancels the replication +* **continuous (optional)**: Configure the replication to be continuous +* **create_target (optional)**: Creates the target database +* **doc_ids (optional)**: Array of document IDs to be synchronized +* **proxy (optional)**: Address of a proxy server through which replication + should occur +* **source**: Source database name or URL +* **target**: Target database name or URL + +Replication Operation +--------------------- + +The aim of the replication is that at the end of the process, all active +documents on the source database are also in the destination database +and all documents that were deleted in the source databases are also +deleted (if they exist) on the destination database. + +Replication can be described as either push or pull replication: + +- *Pull replication* is where the ``source`` is the remote CouchDB + instance, and the ``destination`` is the local database. + + Pull replication is the most useful solution to use if your source + database has a permanent IP address, and your destination (local) + database may have a dynamically assigned IP address (for example, + through DHCP). This is particularly important if you are replicating + to a mobile or other device from a central server. + +- *Push replication* is where the ``source`` is a local database, and + ``destination`` is a remote database. + +Specifying the Source and Target Database +----------------------------------------- + +You must use the URL specification of the CouchDB database if you want +to perform replication in either of the following two situations: + +- Replication with a remote database (i.e. 
another instance of CouchDB + on the same host, or a different host) + +- Replication with a database that requires authentication + +For example, to request replication between a database local to the +CouchDB instance to which you send the request, and a remote database +you might use the following request: + +.. code-block:: http + + POST http://couchdb:5984/_replicate + Content-Type: application/json + Accept: application/json + + { + "source" : "recipes", + "target" : "http://couchdb-remote:5984/recipes" + } + + +In all cases, the requested databases in the ``source`` and ``target`` +specification must exist. If they do not, an error will be returned +within the JSON object: + +.. code-block:: javascript + + { + "error" : "db_not_found", + "reason" : "could not open http://couchdb-remote:5984/ol1ka/" + } + +You can create the target database (providing your user credentials +allow it) by adding the ``create_target`` field to the request object: + +.. code-block:: http + + POST http://couchdb:5984/_replicate + Content-Type: application/json + Accept: application/json + + { + "create_target" : true, + "source" : "recipes", + "target" : "http://couchdb-remote:5984/recipes" + } + +The ``create_target`` field is not destructive. If the database already +exists, the replication proceeds as normal. + +Single Replication +------------------ + +You can request replication of a database so that the two databases can +be synchronized. By default, the replication process occurs one time and +synchronizes the two databases together. For example, you can request a +single synchronization between two databases by supplying the ``source`` +and ``target`` fields within the request JSON content. + +.. 
code-block:: http + + POST http://couchdb:5984/_replicate + Content-Type: application/json + Accept: application/json + + { + "source" : "recipes", + "target" : "recipes-snapshot", + } + +In the above example, the databases ``recipes`` and ``recipes-snapshot`` +will be synchronized. These databases are local to the CouchDB instance +where the request was made. The response will be a JSON structure +containing the success (or failure) of the synchronization process, and +statistics about the process: + +.. code-block:: javascript + + { + "ok" : true, + "history" : [ + { + "docs_read" : 1000, + "session_id" : "52c2370f5027043d286daca4de247db0", + "recorded_seq" : 1000, + "end_last_seq" : 1000, + "doc_write_failures" : 0, + "start_time" : "Thu, 28 Oct 2010 10:24:13 GMT", + "start_last_seq" : 0, + "end_time" : "Thu, 28 Oct 2010 10:24:14 GMT", + "missing_checked" : 0, + "docs_written" : 1000, + "missing_found" : 1000 + } + ], + "session_id" : "52c2370f5027043d286daca4de247db0", + "source_last_seq" : 1000 + } + +The structure defines the replication status, as described in the table +below: + +* **history [array]**: Replication History + + * **doc_write_failures**: Number of document write failures + * **docs_read**: Number of documents read + * **docs_written**: Number of documents written to target + * **end_last_seq**: Last sequence number in changes stream + * **end_time**: Date/Time replication operation completed + * **missing_checked**: Number of missing documents checked + * **missing_found**: Number of missing documents found + * **recorded_seq**: Last recorded sequence number + * **session_id**: Session ID for this replication operation + * **start_last_seq**: First sequence number in changes stream + * **start_time**: Date/Time replication operation started + +* **ok**: Replication status +* **session_id**: Unique session ID +* **source_last_seq**: Last sequence number read from source database + +Continuous Replication +---------------------- + 
+Synchronization of a database with the previously noted methods happens +only once, at the time the replicate request is made. To have the target +database permanently replicated from the source, you must set the +``continuous`` field of the JSON object within the request to true. + +With continuous replication changes in the source database are +replicated to the target database in perpetuity until you specifically +request that replication ceases. + +.. code-block:: http + + POST http://couchdb:5984/_replicate + Content-Type: application/json + Accept: application/json + + { + "continuous" : true, + "source" : "recipes", + "target" : "http://couchdb-remote:5984/recipes" + } + +Changes will be replicated between the two databases as long as a +network connection is available between the two instances. + +.. note:: + To keep two databases synchronized with each other, you need to set + replication in both directions; that is, you must replicate from + ``databasea`` to ``databaseb``, and separately from ``databaseb`` to + ``databasea``. + +Canceling Continuous Replication +-------------------------------- + +You can cancel continuous replication by adding the ``cancel`` field to +the JSON request object and setting the value to true. Note that the +structure of the request must be identical to the original for the +cancellation request to be honoured. For example, if you requested +continuous replication, the cancellation request must also contain the +``continuous`` field. + +For example, the replication request: + +.. code-block:: http + + POST http://couchdb:5984/_replicate + Content-Type: application/json + Accept: application/json + + { + "source" : "recipes", + "target" : "http://couchdb-remote:5984/recipes", + "create_target" : true, + "continuous" : true + } + +Must be canceled using the request: + +.. 
code-block:: http + + POST http://couchdb:5984/_replicate + Content-Type: application/json + Accept: application/json + + { + "cancel" : true, + "continuous" : true, + "create_target" : true, + "source" : "recipes", + "target" : "http://couchdb-remote:5984/recipes" + } + +Requesting cancellation of a replication that does not exist results in +a 404 error. + +``POST /_restart`` +================== + +* **Method**: ``POST /_restart`` +* **Request**: None +* **Response**: JSON status message +* **Admin Privileges Required**: yes +* **HTTP Headers**: + + * **Header**: ``Content-Type`` + + * **Description**: Request content type + * **Optional**: no + * **Value**: :mimetype:`application/json` + +* **Return Codes**: + + * **200**: + Replication request successfully completed + +Restarts the CouchDB instance. You must be authenticated as a user with +administration privileges for this to work. + +For example: + +.. code-block:: http + + POST http://admin:password@couchdb:5984/_restart + +The return value (if the server has not already restarted) is a JSON +status object indicating that the request has been received: + +.. code-block:: javascript + + { + "ok" : true + } + +If the server has already restarted, the header may be returned, but no +actual data is contained in the response. + +``GET /_stats`` +=============== + +* **Method**: ``GET /_stats`` +* **Request**: None +* **Response**: Server statistics +* **Admin Privileges Required**: no +* **Return Codes**: + + * **200**: + Request completed successfully. + +The ``_stats`` method returns a JSON object containing the statistics +for the running server. The object is structured with top-level sections +collating the statistics for a range of entries, with each individual +statistic being easily identified, and the content of each statistic is +self-describing. For example, the request time statistics, within the +``couchdb`` section are structured as follows: + +.. 
code-block:: javascript + + { + "couchdb" : { + ... + "request_time" : { + "stddev" : "27.509", + "min" : "0.333333333333333", + "max" : "152", + "current" : "400.976", + "mean" : "10.837", + "sum" : "400.976", + "description" : "length of a request inside CouchDB without MochiWeb" + }, + ... + } + } + + +The fields provide the current, minimum and maximum, and a collection of +statistical means and quantities. The quantity in each case is not +defined, but the descriptions below provide + +The statistics are divided into the following top-level sections: + +- ``couchdb``: Describes statistics specific to the internals of CouchDB. + + +-------------------------+-------------------------------------------------------+----------------+ + | Statistic ID | Description | Unit | + +=========================+=======================================================+================+ + | ``auth_cache_hits`` | Number of authentication cache hits | number | + +-------------------------+-------------------------------------------------------+----------------+ + | ``auth_cache_misses`` | Number of authentication cache misses | number | + +-------------------------+-------------------------------------------------------+----------------+ + | ``database_reads`` | Number of times a document was read from a database | number | + +-------------------------+-------------------------------------------------------+----------------+ + | ``database_writes`` | Number of times a database was changed | number | + +-------------------------+-------------------------------------------------------+----------------+ + | ``open_databases`` | Number of open databases | number | + +-------------------------+-------------------------------------------------------+----------------+ + | ``open_os_files`` | Number of file descriptors CouchDB has open | number | + +-------------------------+-------------------------------------------------------+----------------+ + | ``request_time`` | Length of a 
request inside CouchDB without MochiWeb | milliseconds | + +-------------------------+-------------------------------------------------------+----------------+ + +- ``httpd_request_methods`` + + +----------------+----------------------------------+----------+ + | Statistic ID | Description | Unit | + +================+==================================+==========+ + | ``COPY`` | Number of HTTP COPY requests | number | + +----------------+----------------------------------+----------+ + | ``DELETE`` | Number of HTTP DELETE requests | number | + +----------------+----------------------------------+----------+ + | ``GET`` | Number of HTTP GET requests | number | + +----------------+----------------------------------+----------+ + | ``HEAD`` | Number of HTTP HEAD requests | number | + +----------------+----------------------------------+----------+ + | ``POST`` | Number of HTTP POST requests | number | + +----------------+----------------------------------+----------+ + | ``PUT`` | Number of HTTP PUT requests | number | + +----------------+----------------------------------+----------+ + +- ``httpd_status_codes`` + + +----------------+------------------------------------------------------+----------+ + | Statistic ID | Description | Unit | + +================+======================================================+==========+ + | ``200`` | Number of HTTP 200 OK responses | number | + +----------------+------------------------------------------------------+----------+ + | ``201`` | Number of HTTP 201 Created responses | number | + +----------------+------------------------------------------------------+----------+ + | ``202`` | Number of HTTP 202 Accepted responses | number | + +----------------+------------------------------------------------------+----------+ + | ``301`` | Number of HTTP 301 Moved Permanently responses | number | + +----------------+------------------------------------------------------+----------+ + | ``304`` | Number of HTTP 304 Not Modified 
responses | number | + +----------------+------------------------------------------------------+----------+ + | ``400`` | Number of HTTP 400 Bad Request responses | number | + +----------------+------------------------------------------------------+----------+ + | ``401`` | Number of HTTP 401 Unauthorized responses | number | + +----------------+------------------------------------------------------+----------+ + | ``403`` | Number of HTTP 403 Forbidden responses | number | + +----------------+------------------------------------------------------+----------+ + | ``404`` | Number of HTTP 404 Not Found responses | number | + +----------------+------------------------------------------------------+----------+ + | ``405`` | Number of HTTP 405 Method Not Allowed responses | number | + +----------------+------------------------------------------------------+----------+ + | ``409`` | Number of HTTP 409 Conflict responses | number | + +----------------+------------------------------------------------------+----------+ + | ``412`` | Number of HTTP 412 Precondition Failed responses | number | + +----------------+------------------------------------------------------+----------+ + | ``500`` | Number of HTTP 500 Internal Server Error responses | number | + +----------------+------------------------------------------------------+----------+ + +- ``httpd`` + + +----------------------------------+----------------------------------------------+----------+ + | Statistic ID | Description | Unit | + +==================================+==============================================+==========+ + | ``bulk_requests`` | Number of bulk requests | number | + +----------------------------------+----------------------------------------------+----------+ + | ``clients_requesting_changes`` | Number of clients for continuous _changes | number | + +----------------------------------+----------------------------------------------+----------+ + | ``requests`` | Number of HTTP requests | number | 
+ +----------------------------------+----------------------------------------------+----------+ + | ``temporary_view_reads`` | Number of temporary view reads | number | + +----------------------------------+----------------------------------------------+----------+ + | ``view_reads`` | Number of view reads | number | + +----------------------------------+----------------------------------------------+----------+ + +You can also access individual statistics by quoting the statistics +sections and statistic ID as part of the URL path. For example, to get +the ``request_time`` statistics, you can use: + +.. code-block:: http + + GET /_stats/couchdb/request_time + +This returns an entire statistics object, as with the full request, but +containing only the request individual statistic. Hence, the returned +structure is as follows: + +.. code-block:: javascript + + { + "couchdb" : { + "request_time" : { + "stddev" : 7454.305, + "min" : 1, + "max" : 34185, + "current" : 34697.803, + "mean" : 1652.276, + "sum" : 34697.803, + "description" : "length of a request inside CouchDB without MochiWeb" + } + } + } + + +``GET /_utils`` +=============== + +* **Method**: ``GET /_utils`` +* **Request**: None +* **Response**: Administration interface +* **Admin Privileges Required**: no + +Accesses the built-in Futon administration interface for CouchDB. + +``GET /_uuids`` +=============== + +* **Method**: ``GET /_uuids`` +* **Request**: None +* **Response**: List of UUIDs +* **Admin Privileges Required**: no +* **Query Arguments**: + + * **Argument**: count + + * **Description**: Number of UUIDs to return + * **Optional**: yes + * **Type**: numeric + +* **Return Codes**: + + * **200**: + Request completed successfully. + +Requests one or more Universally Unique Identifiers (UUIDs) from the +CouchDB instance. The response is a JSON object providing a list of +UUIDs. For example: + +.. 
code-block:: javascript + + { + "uuids" : [ + "7e4b5a14b22ec1cf8e58b9cdd0000da3" + ] + } + +You can use the ``count`` argument to specify the number of UUIDs to be +returned. For example: + +.. code-block:: http + + GET http://couchdb:5984/_uuids?count=5 + +Returns: + +.. code-block:: javascript + + { + "uuids" : [ + "c9df0cdf4442f993fc5570225b405a80", + "c9df0cdf4442f993fc5570225b405bd2", + "c9df0cdf4442f993fc5570225b405e42", + "c9df0cdf4442f993fc5570225b4061a0", + "c9df0cdf4442f993fc5570225b406a20" + ] + } + +The UUID type is determined by the UUID type setting in the CouchDB +configuration. See :ref:`api-put-config`. + +For example, changing the UUID type to ``random``: + +.. code-block:: http + + PUT http://couchdb:5984/_config/uuids/algorithm + Content-Type: application/json + Accept: */* + + "random" + +When obtaining a list of UUIDs: + +.. code-block:: javascript + + { + "uuids" : [ + "031aad7b469956cf2826fcb2a9260492", + "6ec875e15e6b385120938df18ee8e496", + "cff9e881516483911aa2f0e98949092d", + "b89d37509d39dd712546f9510d4a9271", + "2e0dbf7f6c4ad716f21938a016e4e59f" + ] + } + +``GET /favicon.ico`` +==================== + +* **Method**: ``GET /favicon.ico`` +* **Request**: None +* **Response**: Binary content for the `favicon.ico` site icon +* **Admin Privileges Required**: no +* **Return Codes**: + + * **200**: + Request completed successfully. + * **404**: + The requested content could not be found. The returned content will include + further information, as a JSON object, if available. + +Returns the site icon. The return ``Content-Type`` header is +:mimetype:`image/x-icon`, and the content stream is the image data. 
diff -Nru couchdb-1.2.0/share/doc/build/html/_sources/api/reference.txt couchdb-1.4.0~rc.1/share/doc/build/html/_sources/api/reference.txt --- couchdb-1.2.0/share/doc/build/html/_sources/api/reference.txt 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/_sources/api/reference.txt 2013-08-23 10:57:21.000000000 -0400 @@ -0,0 +1,42 @@ +.. Licensed under the Apache License, Version 2.0 (the "License"); you may not +.. use this file except in compliance with the License. You may obtain a copy of +.. the License at +.. +.. http://www.apache.org/licenses/LICENSE-2.0 +.. +.. Unless required by applicable law or agreed to in writing, software +.. distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +.. WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +.. License for the specific language governing permissions and limitations under +.. the License. + +.. _api-overview: + +API Reference +============= + +The components of the API URL path help determine the part of the +CouchDB server that is being accessed. The result is the structure of +the URL request both identifies and effectively describes the area of +the database you are accessing. + +As with all URLs, the individual components are separated by a forward +slash. + +As a general rule, URL components and JSON fields starting with the +``_`` (underscore) character represent a special component or entity +within the server or returned object. For example, the URL fragment +``/_all_dbs`` gets a list of all of the databases in a CouchDB instance. + +This reference is structured according to the URL structure, as below. + +.. 
toctree:: + :maxdepth: 2 + + database + documents + local + design + misc + configuration + authn diff -Nru couchdb-1.2.0/share/doc/build/html/_sources/api-basics.txt couchdb-1.4.0~rc.1/share/doc/build/html/_sources/api-basics.txt --- couchdb-1.2.0/share/doc/build/html/_sources/api-basics.txt 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/_sources/api-basics.txt 2013-08-23 10:57:21.000000000 -0400 @@ -0,0 +1,463 @@ +.. Licensed under the Apache License, Version 2.0 (the "License"); you may not +.. use this file except in compliance with the License. You may obtain a copy of +.. the License at +.. +.. http://www.apache.org/licenses/LICENSE-2.0 +.. +.. Unless required by applicable law or agreed to in writing, software +.. distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +.. WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +.. License for the specific language governing permissions and limitations under +.. the License. + +.. _api-basics: + +========== +API Basics +========== + +The CouchDB API is the primary method of interfacing to a CouchDB +instance. Requests are made using HTTP and requests are used to request +information from the database, store new data, and perform views and +formatting of the information stored within the documents. + +Requests to the API can be categorised by the different areas of the +CouchDB system that you are accessing, and the HTTP method used to send +the request. Different methods imply different operations, for example +retrieval of information from the database is typically handled by the +``GET`` operation, while updates are handled by either a ``POST`` or +``PUT`` request. There are some differences between the information that +must be supplied for the different methods. For a guide to the basic +HTTP methods and request structure, see :ref:`api-format`. 
+ +For nearly all operations, the submitted data, and the returned data +structure, is defined within a JavaScript Object Notation (JSON) object. +Basic information on the content and data types for JSON are provided in +:ref:`json`. + +Errors when accessing the CouchDB API are reported using standard HTTP +Status Codes. A guide to the generic codes returned by CouchDB are +provided in :ref:`errors`. + +When accessing specific areas of the CouchDB API, specific information +and examples on the HTTP methods and request, JSON structures, and error +codes are provided. For a guide to the different areas of the API, see +:ref:`api-overview`. + +.. _api-format: + +Request Format and Responses +============================ + +CouchDB supports the following HTTP request methods: + +- ``GET`` + + Request the specified item. As with normal HTTP requests, the format + of the URL defines what is returned. With CouchDB this can include + static items, database documents, and configuration and statistical + information. In most cases the information is returned in the form of + a JSON document. + +- ``HEAD`` + + The ``HEAD`` method is used to get the HTTP header of a ``GET`` + request without the body of the response. + +- ``POST`` + + Upload data. Within CouchDB ``POST`` is used to set values, including + uploading documents, setting document values, and starting certain + administration commands. + +- ``PUT`` + + Used to put a specified resource. In CouchDB ``PUT`` is used to + create new objects, including databases, documents, views and design + documents. + +- ``DELETE`` + + Deletes the specified resource, including documents, views, and + design documents. + +- ``COPY`` + + A special method that can be used to copy documents and objects. + +If you use the an unsupported HTTP request type with a URL that does not +support the specified type, a 405 error will be returned, listing the +supported HTTP methods. For example: + +.. 
code-block:: javascript + + { + "error":"method_not_allowed", + "reason":"Only GET,HEAD allowed" + } + + +The CouchDB design document API and the functions when returning HTML +(for example as part of a show or list) enables you to include custom +HTTP headers through the ``headers`` block of the return object. + +HTTP Headers +============ + +Because CouchDB uses HTTP for all communication, you need to ensure that +the correct HTTP headers are supplied (and processed on retrieval) so +that you get the right format and encoding. Different environments and +clients will be more or less strict on the effect of these HTTP headers +(especially when not present). Where possible you should be as specific +as possible. + +Request Headers +--------------- + +- ``Content-type`` + + Specifies the content type of the information being supplied within + the request. The specification uses MIME type specifications. For the + majority of requests this will be JSON (``application/json``). For + some settings the MIME type will be plain text. When uploading + attachments it should be the corresponding MIME type for the + attachment or binary (``application/octet-stream``). + + The use of the ``Content-type`` on a request is highly recommended. + +- ``Accept`` + + Specifies the list of accepted data types to be returned by the + server (i.e. that are accepted/understandable by the client). The + format should be a list of one or more MIME types, separated by + colons. + + For the majority of requests the definition should be for JSON data + (``application/json``). For attachments you can either specify the + MIME type explicitly, or use ``*/*`` to specify that all file types + are supported. If the ``Accept`` header is not supplied, then the + ``*/*`` MIME type is assumed (i.e. client accepts all formats). + + The use of ``Accept`` in queries for CouchDB is not required, but is + highly recommended as it helps to ensure that the data returned can + be processed by the client. 
+ + If you specify a data type using the ``Accept`` header, CouchDB will + honor the specified type in the ``Content-type`` header field + returned. For example, if you explicitly request ``application/json`` + in the ``Accept`` of a request, the returned HTTP headers will use + the value in the returned ``Content-type`` field. + + For example, when sending a request without an explicit ``Accept`` + header, or when specifying ``*/*``: + + .. code-block:: http + + GET /recipes HTTP/1.1 + Host: couchdb:5984 + Accept: */* + + The returned headers are: + + .. code-block:: http + + Server: CouchDB/1.0.1 (Erlang OTP/R13B) + Date: Thu, 13 Jan 2011 13:39:34 GMT + Content-Type: text/plain;charset=utf-8 + Content-Length: 227 + Cache-Control: must-revalidate + + Note that the returned content type is ``text/plain`` even though the + information returned by the request is in JSON format. + + Explicitly specifying the ``Accept`` header: + + .. code-block:: http + + GET /recipes HTTP/1.1 + Host: couchdb:5984 + Accept: application/json + + The headers returned include the ``application/json`` content type: + + .. code-block:: http + + Server: CouchDB/|version| (Erlang OTP/R13B) + Date: Thu, 13 Jan 2011 13:40:11 GMT + Content-Type: application/json + Content-Length: 227 + Cache-Control: must-revalidate + +Response Headers +---------------- + +Response headers are returned by the server when sending back content +and include a number of different header fields, many of which are +standard HTTP response header and have no significance to CouchDB +operation. The list of response headers important to CouchDB are listed +below. + +- ``Content-type`` + + Specifies the MIME type of the returned data. For most request, the + returned MIME type is ``text/plain``. All text is encoded in Unicode + (UTF-8), and this is explicitly stated in the returned + ``Content-type``, as ``text/plain;charset=utf-8``. 
+ +- ``Cache-control`` + + The cache control HTTP response header provides a suggestion for + client caching mechanisms on how to treat the returned information. + CouchDB typically returns the ``must-revalidate``, which indicates + that the information should be revalidated if possible. This is used + to ensure that the dynamic nature of the content is correctly + updated. + +- ``Content-length`` + + The length (in bytes) of the returned content. + +- ``Etag`` + + The ``Etag`` HTTP header field is used to show the revision for a + document, or a view. + + ETags have been assigned to a map/reduce group (the collection of + views in a single design document). Any change to any of the indexes + for those views would generate a new ETag for all view URLs in a + single design doc, even if that specific view's results had not + changed. + + Each ``_view`` URL has its own ETag which only gets updated when + changes are made to the database that affect that index. If the + index for that specific view does not change, that view keeps the + original ETag head (therefore sending back 304 Not Modified more + often). + +.. _json: + +JSON Basics +=========== + +The majority of requests and responses to CouchDB use the JavaScript +Object Notation (JSON) for formatting the content and structure of the +data and responses. + +JSON is used because it is the simplest and easiest to use solution for +working with data within a web browser, as JSON structures can be +evaluated and used as JavaScript objects within the web browser +environment. JSON also integrates with the server-side JavaScript used +within CouchDB. + +JSON supports the same basic types as supported by JavaScript, these +are: + +- Number (either integer or floating-point). + +- String; this should be enclosed by double-quotes and supports Unicode + characters and backslash escaping. For example: + + .. code-block:: javascript + + "A String" + +- Boolean - a ``true`` or ``false`` value. 
You can use these strings + directly. For example: + + .. code-block:: javascript + + { "value": true} + +- Array - a list of values enclosed in square brackets. For example: + + .. code-block:: javascript + + ["one", "two", "three"] + +- Object - a set of key/value pairs (i.e. an associative array, or + hash). The key must be a string, but the value can be any of the + supported JSON values. For example: + + .. code-block:: javascript + + { + "servings" : 4, + "subtitle" : "Easy to make in advance, and then cook when ready", + "cooktime" : 60, + "title" : "Chicken Coriander" + } + + + In CouchDB, the JSON object is used to represent a variety of + structures, including the main CouchDB document. + +Parsing JSON into a JavaScript object is supported through the +``JSON.parse()`` function in JavaScript, or through various libraries that +will perform the parsing of the content into a JavaScript object for +you. Libraries for parsing and generating JSON are available in many +languages, including Perl, Python, Ruby, Erlang and others. + +.. warning:: + Care should be taken to ensure that your JSON structures are + valid, invalid structures will cause CouchDB to return an HTTP status code + of 500 (server error). + +.. _errors: + +HTTP Status Codes +================= + +With the interface to CouchDB working through HTTP, error codes and +statuses are reported using a combination of the HTTP status code +number, and corresponding data in the body of the response data. + +A list of the error codes returned by CouchDB, and generic descriptions +of the related errors are provided below. The meaning of different +status codes for specific request types are provided in the +corresponding API call reference. + +- ``200 - OK`` + + Request completed successfully. + +- ``201 - Created`` + + Document created successfully. + +- ``202 - Accepted`` + + Request has been accepted, but the corresponding operation may not + have completed. 
This is used for background operations, such as + database compaction. + +- ``304 - Not Modified`` + + The additional content requested has not been modified. This is used + with the ETag system to identify the version of information returned. + +- ``400 - Bad Request`` + + Bad request structure. The error can indicate an error with the + request URL, path or headers. Differences in the supplied MD5 hash + and content also trigger this error, as this may indicate message + corruption. + +- ``401 - Unauthorized`` + + The item requested was not available using the supplied + authorization, or authorization was not supplied. + +- ``403 - Forbidden`` + + The requested item or operation is forbidden. + +- ``404 - Not Found`` + + The requested content could not be found. The content will include + further information, as a JSON object, if available. The structure + will contain two keys, ``error`` and ``reason``. For example: + + .. code-block:: javascript + + {"error":"not_found","reason":"no_db_file"} + +- ``405 - Resource Not Allowed`` + + A request was made using an invalid HTTP request type for the URL + requested. For example, you have requested a ``PUT`` when a ``POST`` + is required. Errors of this type can also be triggered by invalid URL + strings. + +- ``406 - Not Acceptable`` + + The requested content type is not supported by the server. + +- ``409 - Conflict`` + + Request resulted in an update conflict. + +- ``412 - Precondition Failed`` + + The request headers from the client and the capabilities of the + server do not match. + +- ``415 - Bad Content Type`` + + The content types supported, and the content type of the information + being requested or submitted indicate that the content type is not + supported. + +- ``416 - Requested Range Not Satisfiable`` + + The range specified in the request header cannot be satisfied by the + server. + +- ``417 - Expectation Failed`` + + When sending documents in bulk, the bulk load operation failed. 
+ +- ``500 - Internal Server Error`` + + The request was invalid, either because the supplied JSON was + invalid, or invalid information was supplied as part of the request. + +HTTP Range Requests +=================== + +HTTP allows you to specify byte ranges for requests. This allows the +implementation of resumable downloads and skippable audio and video +streams alike. This is available for all attachments inside CouchDB. + +This is just a real quick run through how this looks under the hood. +Usually, you will have larger binary files to serve from CouchDB, like +MP3s and videos, but to make things a little more obvious, I use a text +file here (Note that I use the ``application/octet-stream`` Content-Type +instead of ``text/plain``). + +.. code-block:: bash + + shell> cat file.txt + My hovercraft is full of eels! + +Now let's store this text file as an attachment in CouchDB. First, we +create a database: + +.. code-block:: bash + + shell> curl -X PUT http://127.0.0.1:5984/test + {"ok":true} + +Then we create a new document and the file attachment in one go: + +.. code-block:: bash + + shell> curl -X PUT http://127.0.0.1:5984/test/doc/file.txt \ + -H "Content-Type: application/octet-stream" -d@file.txt + {"ok":true,"id":"doc","rev":"1-287a28fa680ae0c7fb4729bf0c6e0cf2"} + +Now we can request the whole file easily: + +.. code-block:: bash + + shell> curl -X GET http://127.0.0.1:5984/test/doc/file.txt + My hovercraft is full of eels! + +But say we only want the first 13 bytes: + +.. code-block:: bash + + shell> curl -X GET http://127.0.0.1:5984/test/doc/file.txt \ + -H "Range: bytes=0-12" + My hovercraft + +HTTP supports many ways to specify single and even multiple byte +ranges. Read all about it in `RFC 2616`_. + +.. note:: + Databases that have been created with CouchDB 1.0.2 or earlier will + support range requests in |version|, but they are using a less-optimal + algorithm. 
If you plan to make heavy use of this feature, make sure + to compact your database with CouchDB |version| to take advantage of a + better algorithm to find byte ranges. + +.. _RFC 2616: http://tools.ietf.org/html/rfc2616#section-14.27 diff -Nru couchdb-1.2.0/share/doc/build/html/_sources/changelog.txt couchdb-1.4.0~rc.1/share/doc/build/html/_sources/changelog.txt --- couchdb-1.2.0/share/doc/build/html/_sources/changelog.txt 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/_sources/changelog.txt 2013-08-23 10:57:21.000000000 -0400 @@ -0,0 +1,1489 @@ +.. Licensed under the Apache License, Version 2.0 (the "License"); you may not +.. use this file except in compliance with the License. You may obtain a copy of +.. the License at +.. +.. http://www.apache.org/licenses/LICENSE-2.0 +.. +.. Unless required by applicable law or agreed to in writing, software +.. distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +.. WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +.. License for the specific language governing permissions and limitations under +.. the License. + +Release History +*************** + +.. contents:: + :depth: 1 + :local: + +1.4.x Branch +============ + +.. contents:: + :depth: 1 + :local: + +Upgrade Notes +------------- + +We now support Erlang/OTP R16B and R16B01; the minimum required version is R14B. + +User document role values must now be strings. Other types of values will be +refused when saving the user document. + +Version 1.4.0 +------------- + +* :issue:`1684`: Support for server-wide changes feed reporting on creation, + updates and deletion of databases. :commit:`917d8988` +* :issue:`1139`: it's possible to apply :ref:`list ` + functions to ``_all_docs`` view. :commit:`54fd258e` +* Automatic loading of CouchDB plugins. :commit:`3fab6bb5` +* :issue:`1634`: Reduce PBKDF2 work factor. :commit:`f726bc4d` +* Allow storing pre-hashed admin passwords via `_config` API. 
+ :commit:`c98ba561` +* :issue:`1772`: Prevent invalid JSON output when using `all_or_nothing` + `_bulk_docs` API. :commit:`dfd39d57` +* Add a :ref:`configurable whitelist ` of user + document properties. :commit:`8d7ab8b1` +* :issue:`1852`: Support Last-Event-ID header in EventSource changes feeds. + :commit:`dfd2199a` +* Much improved documentation, including an :ref:`expanded description + ` of `validate_doc_update` functions (commit:`ef9ac469`) and + a description of how CouchDB handles JSON :ref:`number values + ` (:commit:`bbd93f77`). +* :issue:`1632`: Ignore epilogues in multipart/related MIME attachments. + :commit:`2b4ab67a` +* Split up replicator_db tests into multiple independent tests. + +1.3.x Branch +============ + +.. contents:: + :depth: 1 + :local: + +Upgrade Notes +------------- + +You can upgrade your existing CouchDB 1.0.x installation to 1.3.0 +without any specific steps or migration. When you run CouchDB, the +existing data and index files will be opened and used as normal. + +The first time you run a compaction routine on your database within 1.3.0, +the data structure and indexes will be updated to the new version of the +CouchDB database format that can only be read by CouchDB 1.3.0 and later. +This step is not reversible. Once the data files have been updated and +migrated to the new version the data files will no longer work with a +CouchDB 1.0.x release. + +.. warning:: + If you want to retain support for opening the data files in + CouchDB 1.0.x you must back up your data files before performing the + upgrade and compaction process. + +Version 1.3.1 +------------- + +Replicator +^^^^^^^^^^ + +* :issue:`1788`: Tolerate missing source and target fields in _replicator docs. + :commit:`869f42e2` + +Log System +^^^^^^^^^^ + +* Don't log about missing .compact files. :commit:`06f1a8dc` +* :issue:`1794`: Fix bug in WARN level logging from 1.3.0. 
+ +View Server +^^^^^^^^^^^ + +* :issue:`1792`: Fix the -S option to couchjs to increase memory limits. + :commit:`cfaa66cd` + +Miscellaneous +^^^^^^^^^^^^^ + +* Improve documentation: better structure, improve language, less duplication. +* :issue:`1784`: Improvements to test suite and VPATH build system. + :commit:`01afaa4f` + +Version 1.3.0 +------------- + +Database core +^^^^^^^^^^^^^ + +* :issue:`1512`: Validate bind address before assignment. :commit:`09ead8a0` +* Restore ``max_document_size`` protection. :commit:`bf1eb135` + +Documentation +^^^^^^^^^^^^^ + +* :issue:`1523`: Import CouchBase documentation and convert them into + `Sphinx docs `_ + +Futon +^^^^^ + +* :issue:`1470`: Futon raises popup on attempt to navigate to missed/deleted + document. :commit:`5da40eef` +* :issue:`1383`: Futon view editor won't allow you to save original view after + saving a revision. :commit:`ce48342` +* :issue:`627`: Support all timezones. :commit:`b1a049bb` +* :issue:`509`: Added view request duration to Futon. :commit:`2d2c7d1e` +* :issue:`1473`, :issue:`1472`: Disable buttons for actions that the user + doesn't have permissions to. :commit:`7156254d` + +HTTP Interface +^^^^^^^^^^^^^^^^^ + +* :issue:`431`: Introduce experimental :ref:`CORS support `. + :commit:`b90e4021` +* :issue:`1537`: Include user name in show/list `ETags`. :commit:`ac320479` +* :issue:`1511`: CouchDB checks `roles` field for `_users` database documents + with more care. :commit:`41205000` +* :issue:`1502`: Allow users to delete own _users doc. :commit:`f0d6f19bc8` +* :issue:`1501`: :ref:`Changes feed ` now can take special parameter + ``since=now`` to emit changes since current point of time. :commit:`3bbb2612` +* :issue:`1442`: No longer rewrites the `X-CouchDB-Requested-Path` during + recursive calls to the rewriter. :commit:`56744f2f` +* :issue:`1441`: Limit recursion depth in the URL rewriter. + Defaults to a maximum of 100 invocations but is configurable. 
+ :commit:`d076976c` +* :issue:`1381`: Add jquery.couch support for Windows 8 Metro apps. + :commit:`dfc5d37c` +* :issue:`1337`: Use MD5 for attachment ETag header value. :commit:`6d912c9f` +* :issue:`1321`: Variables in rewrite rules breaks OAuth authentication. + :commit:`c307ba95` +* :issue:`1285`: Allow configuration of vendor and modules version in CouchDB + welcome message. :commit:`3c24a94d` +* :issue:`1277`: Better query parameter support and code clarity: + :commit:`7e3c69ba` + + * Responses to documents created/modified via form data `POST` to /db/doc or + copied with `COPY` should now include `Location` header. + * Form data POST to /db/doc now includes an `ETag` response header. + * ``?batch=ok`` is now supported for `COPY` and `POST` /db/doc updates. + * ``?new_edits=false`` is now supported for more operations. + +* :issue:`1210`: Files starting with underscore can be attached and updated now. + :commit:`05858792` +* :issue:`1097`: Allow `OPTIONS` request to shows and lists functions. + :commit:`9f53704a` +* :issue:`1026`: Database names are encoded with respect of special characters + in the rewriter now. :commit:`272d6415` +* :issue:`986`: Added Server-Sent Events protocol to db changes API. + See http://www.w3.org/TR/eventsource/ for details. :commit:`093d2aa6` +* :issue:`887`: Fix ``bytes`` and ``offset`` parameters semantic for `_log` + resource (`explanation `_) + :commit:`ad700014` +* :issue:`764`, :issue:`514`, :issue:`430`: Fix sending HTTP headers from + ``_list`` function, :commit:`2a74f88375` +* Send a 202 response for `_restart`. :commit:`b213e16f` +* Make password hashing synchronous when using the /_config/admins API. + :commit:`08071a80` +* Fix `_session` for IE7. 
+* Return ``X-Couch-Id`` header if doc is created, :commit:`98515bf0b9` +* Allow any 2xx code to indicate success, :commit:`0d50103cfd` +* Restore 400 error for empty PUT, :commit:`2057b895` +* Add support to serve single file with CouchDB, :commit:`2774531ff2` +* Support auth cookies with ``:`` characters, :commit:`d9566c831d` + +Log System +^^^^^^^^^^ + +* :issue:`1380`: Minor fixes for logrotate support. +* Improve file I/O error logging and handling, :commit:`4b6475da` +* Module Level Logging, :commit:`b58f069167` +* Log 5xx responses at error level, :commit:`e896b0b7` +* Log problems opening database at ERROR level except for auto-created + system dbs, :commit:`41667642f7` + +Replicator +^^^^^^^^^^ + +* :issue:`1557`: Upgrade some code to use BIFs bring good improvements for + replication. +* :issue:`1363`: Fix rarely occurred, but still race condition in changes feed + if a quick burst of changes happens while replication is starting the + replication can go stale. :commit:`573a7bb9` +* :issue:`1323`: Replicator now acts as standalone application. + :commit:`f913ca6e` +* :issue:`1259`: Stabilize replication id, :commit:`c6252d6d7f` +* :issue:`1248`: `HTTP 500` error now doesn't occurs when replicating with + ``?doc_ids=null``. :commit:`bea76dbf` + +Security +^^^^^^^^ + +* :issue:`1060`: Passwords are now hashed using the PBKDF2 algorithm with a + configurable work factor. :commit:`7d418134` + +Source Repository +^^^^^^^^^^^^^^^^^ + +* The source repository was migrated from `SVN`_ to `Git`_. + +.. _SVN: https://svn.apache.org/repos/asf/couchdb +.. _Git: https://git-wip-us.apache.org/repos/asf/couchdb.git + +Storage System +^^^^^^^^^^^^^^ + +* Fixed unnecessary conflict when deleting and creating a + document in the same batch. + +Test Suite +^^^^^^^^^^ + +* :issue:`1563`: Ensures urlPrefix is set in all ajax requests. + :commit:`07a6af222` +* :issue:`1389`: Improved tracebacks printed by the JS CLI tests. 
+* :issue:`1339`: Use shell trap to catch dying beam processes during test runs. + :commit:`2921c78` +* :issue:`1338`: Start CouchDB with ``port=0``. While CouchDB might be already + running on the default port 5984, port number 0 let the TCP stack figure out a + free port to run. :commit:`127cbe3` +* :issue:`1321`: Moved the JS test suite to the CLI. +* Improved the reliability of a number of tests. +* Fix race condition for test running on faster hardware. + +URL Rewriter & Vhosts +^^^^^^^^^^^^^^^^^^^^^ + +* :issue:`1026`: Database name is encoded during rewriting + (allowing embedded /'s, etc). :commit:`272d6415` + +UUID Algorithms +^^^^^^^^^^^^^^^ + +* :issue:`1373`: Added the utc_id algorithm :commit:`5ab712a2` + +Query and View Server +^^^^^^^^^^^^^^^^^^^^^ + +* :issue:`1491`: Cleanup view tables. :commit:`c37204b7` +* :issue:`1483`: Update handlers requires valid doc ids. :commit:`72ea7e38` +* :issue:`1445`: CouchDB tries no more to delete view file if it couldn't open + it, even if the error is `emfile`. +* :issue:`1444`: Fix missed_named_view error that occurs on existing design + documents and views. :commit:`b59ac98b` +* :issue:`1372`: `_stats` builtin reduce function no longer produces error for + empty view result. +* :issue:`410`: More graceful error handling for JavaScript validate_doc_update + functions. +* :issue:`111`: Improve the errors reported by the javascript view server + to provide a more friendly error report when something goes wrong. + :commit:`0c619ed` +* Deprecate E4X support, :commit:`cdfdda2314` + +Windows +^^^^^^^ + +* :issue:`1482`: Use correct linker flag to build `snappy_nif.dll` on Windows. + :commit:`a6eaf9f1` +* Allows building cleanly on Windows without cURL, :commit:`fb670f5712` + +1.2.x Branch +============ + +.. contents:: + :depth: 1 + :local: + +Upgrade Notes +------------- + +.. warning:: + + This version drops support for the database format that was introduced in + version 0.9.0. 
Compact your older databases (that have not been compacted + for a long time) before upgrading, or they will become inaccessible. + +Security changes +^^^^^^^^^^^^^^^^ + +The interface to the ``_users`` and ``_replicator`` databases have been +changed so that non-administrator users can see less information: + +* In the ``_users`` database: + + * User documents can now only be read by the respective users, as well as + administrators. Other users cannot read these documents. + * Views can only be defined and queried by administrator users. + * The ``_changes`` feed can only be queried by administrator users. + +* In the ``_replicator`` database: + + * Documents now have a forced ``owner`` field that corresponds to the + authenticated user that created them. + * Non-owner users will not see confidential information like passwords or + OAuth tokens in replication documents; they can still see the other + contents of those documents. Administrators can see everything. + * Views can only be defined and queried by administrators. + +Database Compression +^^^^^^^^^^^^^^^^^^^^ + +The new optional (but enabled by default) compression of disk files requires +an upgrade of the on-disk format (5 -> 6) which occurs on creation for new +databases and views, and on compaction for existing files. This format is not +supported in previous releases, so rollback would require replication to the +previous CouchDB release or restoring from backup. + +Compression can be disabled by setting ``compression = none`` in your +``local.ini`` ``[couchdb]`` section, but the on-disk format will still be +upgraded. 
+ +Version 1.2.1 +------------- + +Security +^^^^^^^^ + +* Fixed CVE-2012-5641: Apache CouchDB Information disclosure via unescaped + backslashes in URLs on Windows +* Fixed CVE-2012-5649: Apache CouchDB JSONP arbitrary code execution with Adobe + Flash +* Fixed CVE-2012-5650: Apache CouchDB DOM based Cross-Site Scripting via Futon + UI + +HTTP Interface +^^^^^^^^^^^^^^ + +* No longer rewrites the X-CouchDB-Requested-Path during recursive + calls to the rewriter. +* Limit recursion depth in the URL rewriter. Defaults to a maximum + of 100 invocations but is configurable. + +Build System +^^^^^^^^^^^^ + +* Fix couchdb start script. +* Win: fix linker invocations. + +Futon +^^^^^ + +* Disable buttons that aren't available for the logged-in user. + +Replication +^^^^^^^^^^^ + +* Fix potential timeouts. + +View System +^^^^^^^^^^^ + +* Change use of signals to avoid broken view groups. + +Version 1.2.0 +------------- + +Authentication +^^^^^^^^^^^^^^ + +* Fix use of OAuth with VHosts and URL rewriting. +* OAuth secrets can now be stored in the users system database + as an alternative to key value pairs in the .ini configuration. + By default this is disabled (secrets are stored in the .ini) + but can be enabled via the .ini configuration key `use_users_db` + in the `couch_httpd_oauth` section. +* Documents in the _users database are no longer publicly + readable. +* Confidential information in the _replication database is no + longer publicly readable. +* Password hashes are now calculated by CouchDB. Clients are no + longer required to do this manually. +* Cookies used for authentication can be made persistent by enabling + the .ini configuration key `allow_persistent_cookies` in the + `couch_httpd_auth` section. + +Build System +^^^^^^^^^^^^ + +* cURL is no longer required to build CouchDB as it is only + used by the command line JS test runner. If cURL is available + when building CouchJS you can enable the HTTP bindings by + passing -H on the command line. 
+* Temporarily made `make check` pass with R15B. A more thorough + fix is in the works (:issue:`1424`). +* Fixed --with-js-include and --with-js-lib options. +* Added --with-js-lib-name option. + +Futon +^^^^^ + +* The `Status` screen (active tasks) now displays two new task status + fields: `Started on` and `Updated on`. +* Futon remembers view code every time it is saved, allowing to save an + edit that amounts to a revert. + +HTTP Interface +^^^^^^^^^^^^^^ + +* Added a native JSON parser. +* The _active_tasks API now offers more granular fields. Each + task type is now able to expose different properties. +* Added built-in changes feed filter `_view`. +* Fixes to the `_changes` feed heartbeat option which caused + heartbeats to be missed when used with a filter. This caused + timeouts of continuous pull replications with a filter. +* Properly restart the SSL socket on configuration changes. + +Replicator +^^^^^^^^^^ + +* A new replicator implementation. It offers more performance and + configuration options. +* Passing non-string values to query_params is now a 400 bad + request. This is to reduce the surprise that all parameters + are converted to strings internally. +* Added optional field `since_seq` to replication objects/documents. + It allows to bootstrap a replication from a specific source sequence + number. +* Simpler replication cancellation. In addition to the current method, + replications can now be canceled by specifying the replication ID + instead of the original replication object/document. + +Storage System +^^^^^^^^^^^^^^ + +* Added optional database and view index file compression (using Google's + snappy or zlib's deflate). This feature is enabled by default, but it + can be disabled by adapting local.ini accordingly. The on-disk format + is upgraded on compaction and new DB/view creation to support this. +* Several performance improvements, most notably regarding database writes + and view indexing. 
+* Computation of the size of the latest MVCC snapshot data and all its + supporting metadata, both for database and view index files. This + information is exposed as the `data_size` attribute in the database and + view group information URIs. +* The size of the buffers used for database and view compaction is now + configurable. +* Added support for automatic database and view compaction. This feature + is disabled by default, but it can be enabled via the .ini configuration. +* Performance improvements for the built-in changes feed filters `_doc_ids` + and `_design`. + +View Server +^^^^^^^^^^^ + +* Add CoffeeScript (http://coffeescript.org/) as a first class view server + language. +* Fixed old index file descriptor leaks after a view cleanup. +* The requested_path property keeps the pre-rewrite path even when no VHost + configuration is matched. +* Fixed incorrect reduce query results when using pagination parameters. +* Made icu_driver work with Erlang R15B and later. + +OAuth +^^^^^ + +* Updated bundled erlang_oauth library to the latest version. + +1.1.x Branch +============ + +.. contents:: + :depth: 1 + :local: + +Version 1.1.2 +------------- + +Security +^^^^^^^^ + +* Fixed CVE-2012-5641: Apache CouchDB Information disclosure via unescaped + backslashes in URLs on Windows. +* Fixed CVE-2012-5649: Apache CouchDB JSONP arbitrary code execution with + Adobe Flash. +* Fixed CVE-2012-5650: Apache CouchDB DOM based Cross-Site Scripting via Futon + UI. + +HTTP Interface +^^^^^^^^^^^^^^ + +* ETag of attachment changes only when the attachment changes, not + the document. +* Fix retrieval of headers larger than 4k. +* Allow OPTIONS HTTP method for list requests. +* Don't attempt to encode invalid json. + +Replicator +^^^^^^^^^^ + +* Fix pull replication of documents with many revisions. +* Fix replication from an HTTP source to an HTTP target. + +View Server +^^^^^^^^^^^ + +* Avoid invalidating view indexes when running out of file descriptors. 
+ +Log System +^^^^^^^^^^ + +* Improvements to log messages for file-related errors. + +Build System +^^^^^^^^^^^^ + +* Don't `ln` the `couchjs` install target on Windows +* Remove ICU version dependency on Windows. +* Improve SpiderMonkey version detection. + +Version 1.1.1 +------------- + +* Support SpiderMonkey 1.8.5 +* Add configurable maximum to the number of bytes returned by _log. +* Allow CommonJS modules to be an empty string. +* Bump minimum Erlang version to R13B02. +* Do not run deleted validate_doc_update functions. +* ETags for views include current sequence if include_docs=true. +* Fix bug where duplicates can appear in _changes feed. +* Fix bug where update handlers break after conflict resolution. +* Fix bug with _replicator where include "filter" could crash couch. +* Fix crashes when compacting large views. +* Fix file descriptor leak in _log +* Fix missing revisions in _changes?style=all_docs. +* Improve handling of compaction at max_dbs_open limit. +* JSONP responses now send "text/javascript" for Content-Type. +* Link to ICU 4.2 on Windows. +* Permit forward slashes in path to update functions. +* Reap couchjs processes that hit reduce_overflow error. +* Status code can be specified in update handlers. +* Support provides() in show functions. +* _view_cleanup when ddoc has no views now removes all index files. +* max_replication_retry_count now supports "infinity". +* Fix replication crash when source database has a document with empty ID. +* Fix deadlock when assigning couchjs processes to serve requests. +* Fixes to the document multipart PUT API. +* Fixes regarding file descriptor leaks for databases with views. + + +Version 1.1.0 +------------- + +.. note:: All CHANGES for 1.0.2 and 1.0.3 also apply to 1.1.0. + +Externals +^^^^^^^^^ + +* Added OS Process module to manage daemons outside of CouchDB. +* Added HTTP Proxy handler for more scalable externals. + +Futon +^^^^^ + +* Added a "change password"-feature to Futon. 
+ +HTTP Interface +^^^^^^^^^^^^^^ + +* Native SSL support. +* Added support for HTTP range requests for attachments. +* Added built-in filters for `_changes`: `_doc_ids` and `_design`. +* Added configuration option for TCP_NODELAY aka "Nagle". +* Allow POSTing arguments to `_changes`. +* Allow `keys` parameter for GET requests to views. +* Allow wildcards in vhosts definitions. +* More granular ETag support for views. +* More flexible URL rewriter. +* Added support for recognizing "Q values" and media parameters in + HTTP Accept headers. +* Validate doc ids that come from a PUT to a URL. + +Replicator +^^^^^^^^^^ + +* Added `_replicator` database to manage replications. +* Fixed issues when an endpoint is a remote database accessible via SSL. +* Added support for continuous by-doc-IDs replication. +* Fix issue where revision info was omitted when replicating attachments. +* Integrity of attachment replication is now verified by MD5. + +Storage System +^^^^^^^^^^^^^^ + +* Multiple micro-optimizations when reading data. + +URL Rewriter & Vhosts +^^^^^^^^^^^^^^^^^^^^^ + +* Fix for variable substitution + +View Server +^^^^^^^^^^^ + +* Added CommonJS support to map functions. +* Added `stale=update_after` query option that triggers a view update after + returning a `stale=ok` response. +* Warn about empty result caused by `startkey` and `endkey` limiting. +* Built-in reduce function `_sum` now accepts lists of integers as input. +* Added view query aliases start_key, end_key, start_key_doc_id and + end_key_doc_id. + +1.0.x Branch +============ + +.. contents:: + :depth: 1 + :local: + +Version 1.0.4 +------------- + +Security +^^^^^^^^ + +* Fixed CVE-2012-5641: Apache CouchDB Information disclosure via unescaped + backslashes in URLs on Windows. +* Fixed CVE-2012-5649: Apache CouchDB JSONP arbitrary code execution with + Adobe Flash. +* Fixed CVE-2012-5650: Apache CouchDB DOM based Cross-Site Scripting via Futon + UI. 
+ +Log System +^^^^^^^^^^ + +* Fix file descriptor leak in `_log`. + +HTTP Interface +^^^^^^^^^^^^^^ + +* Fix missing revisions in `_changes?style=all_docs`. +* Fix validation of attachment names. + +View System +^^^^^^^^^^^ + +* Avoid invalidating view indexes when running out of file descriptors. + +Replicator +^^^^^^^^^^ + +* Fix a race condition where replications can go stale. + +Version 1.0.3 +------------- + +General +^^^^^^^ + +* Fixed compatibility issues with Erlang R14B02. + +Etap Test Suite +^^^^^^^^^^^^^^^ + +* Etap tests no longer require use of port 5984. They now use a randomly + selected port so they won't clash with a running CouchDB. + +Futon +^^^^^ + +* Made compatible with jQuery 1.5.x. + +HTTP Interface +^^^^^^^^^^^^^^ + +* Fix bug that allows invalid UTF-8 after valid escapes. +* The query parameter `include_docs` now honors the parameter `conflicts`. + This applies to queries against map views, _all_docs and _changes. +* Added support for inclusive_end with reduce views. + +Replicator +^^^^^^^^^^ + +* Enabled replication over IPv6. +* Fixed for crashes in continuous and filtered changes feeds. +* Fixed error when restarting replications in OTP R14B02. +* Upgrade ibrowse to version 2.2.0. +* Fixed bug when using a filter and a limit of 1. + +Security +^^^^^^^^ + +* Fixed OAuth signature computation in OTP R14B02. +* Handle passwords with : in them. + +Storage System +^^^^^^^^^^^^^^ + +* More performant queries against _changes and _all_docs when using the + `include_docs` parameter. + +Windows +^^^^^^^ + +* Windows builds now require ICU >= 4.4.0 and Erlang >= R14B03. See + :issue:`1152`, and :issue:`963` + OTP-9139 for more information. + + +Version 1.0.2 +------------- + +Security +^^^^^^^^ + +* Fixed CVE-2010-3854: Apache CouchDB Cross Site Scripting Issue. + +Futon +^^^^^ + +* Make test suite work with Safari and Chrome. +* Fixed animated progress spinner. +* Fix raw view document link due to overzealous URI encoding. 
+* Spell javascript correctly in loadScript(uri).
+
+HTTP Interface
+^^^^^^^^^^^^^^
+
+* Allow reduce=false parameter in map-only views.
+* Fix parsing of Accept headers.
+* Fix for multipart GET APIs when an attachment was created during a
+  local-local replication. See :issue:`1022` for details.
+
+Log System
+^^^^^^^^^^
+
+* Reduce lengthy stack traces.
+* Allow logging of native types.
+
+Replicator
+^^^^^^^^^^
+
+* Updated ibrowse library to 2.1.2 fixing numerous replication issues.
+* Make sure that the replicator respects HTTP settings defined in the config.
+* Fix error when the ibrowse connection closes unexpectedly.
+* Fix authenticated replication (with HTTP basic auth) of design documents
+  with attachments.
+* Various fixes to make replication more resilient for edge-cases.
+
+Storage System
+^^^^^^^^^^^^^^
+
+* Fix leaking file handles after compacting databases and views.
+* Fix databases forgetting their validation function after compaction.
+* Fix occasional timeout errors after successfully compacting large databases.
+* Fix occasional error when writing to a database that has just been compacted.
+* Fix occasional timeout errors on systems with slow or heavily loaded IO.
+* Fix for OOME when compactions include documents with many conflicts.
+* Fix for missing attachment compression when MIME types included parameters.
+* Preserve purge metadata during compaction to avoid spurious view rebuilds.
+* Fix spurious conflicts introduced when uploading an attachment after
+  a doc has been in a conflict. See :issue:`902` for details.
+* Fix for frequently edited documents in multi-master deployments being
+  duplicated in _changes and _all_docs. See :issue:`968` for details on how
+  to repair.
+* Significantly higher read and write throughput against database and
+  view index files.
+
+View Server
+^^^^^^^^^^^
+
+* Don't trigger view updates when requesting `_design/doc/_info`.
+* Fix for circular references in CommonJS requires.
+* Made isArray() function available to functions executed in the query server. +* Documents are now sealed before being passed to map functions. +* Force view compaction failure when duplicated document data exists. When + this error is seen in the logs users should rebuild their views from + scratch to fix the issue. See :issue:`999` for details. + + +Version 1.0.1 +------------- + +Security +^^^^^^^^ + +* Fixed CVE-2010-2234: Apache CouchDB Cross Site Request Forgery Attack. + +Authentication +^^^^^^^^^^^^^^ + +* Enable basic-auth popup when required to access the server, to prevent + people from getting locked out. + +Build and System Integration +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* Included additional source files for distribution. + +Futon +^^^^^ + +* User interface element for querying stale (cached) views. + +HTTP Interface +^^^^^^^^^^^^^^ + +* Expose `committed_update_seq` for monitoring purposes. +* Show fields saved along with _deleted=true. Allows for auditing of deletes. +* More robust Accept-header detection. + +Replicator +^^^^^^^^^^ + +* Added support for replication via an HTTP/HTTPS proxy. +* Fix pull replication of attachments from 0.11 to 1.0.x. +* Make the _changes feed work with non-integer seqnums. + +Storage System +^^^^^^^^^^^^^^ + +* Fix data corruption bug :issue:`844`. Please see + http://couchdb.apache.org/notice/1.0.1.html for details. + + +Version 1.0.0 +------------- + +Security +^^^^^^^^ + +* Added authentication caching, to avoid repeated opening and closing of the + users database for each request requiring authentication. + +Storage System +^^^^^^^^^^^^^^ + +* Small optimization for reordering result lists. +* More efficient header commits. +* Use O_APPEND to save lseeks. +* Faster implementation of pread_iolist(). Further improves performance on + concurrent reads. + +View Server +^^^^^^^^^^^ + +* Faster default view collation. +* Added option to include update_seq in view responses. + +0.11.x Branch +============= + +.. 
contents:: + :depth: 1 + :local: + +Version 0.11.2 +-------------- + +Security +^^^^^^^^ + +* Fixed CVE-2010-2234: Apache CouchDB Cross Site Request Forgery Attack. +* Avoid potential DOS attack by guarding all creation of atoms. + +Authentication +^^^^^^^^^^^^^^ + +* User documents can now be deleted by admins or the user. + +Futon +^^^^^ + +* Add some Futon files that were missing from the Makefile. + +HTTP Interface +^^^^^^^^^^^^^^ + +* Better error messages on invalid URL requests. + +Replicator +^^^^^^^^^^ + +* Fix bug when pushing design docs by non-admins, which was hanging the + replicator for no good reason. +* Fix bug when pulling design documents from a source that requires + basic-auth. + + +Version 0.11.1 +-------------- + +Build and System Integration +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* Output of `couchdb --help` has been improved. +* Fixed compatibility with the Erlang R14 series. +* Fixed warnings on Linux builds. +* Fixed build error when aclocal needs to be called during the build. +* Require ICU 4.3.1. +* Fixed compatibility with Solaris. + +Configuration System +^^^^^^^^^^^^^^^^^^^^ + +* Fixed timeout with large .ini files. + +Futon +^^^^^ + +* Use "expando links" for over-long document values in Futon. +* Added continuous replication option. +* Added option to replicating test results anonymously to a community + CouchDB instance. +* Allow creation and deletion of config entries. +* Fixed display issues with doc ids that have escaped characters. +* Fixed various UI issues. + +HTTP Interface +^^^^^^^^^^^^^^ + +* Mask passwords in active tasks and logging. +* Update mochijson2 to allow output of BigNums not in float form. +* Added support for X-HTTP-METHOD-OVERRIDE. +* Better error message for database names. +* Disable jsonp by default. +* Accept gzip encoded standalone attachments. +* Made max_concurrent_connections configurable. +* Made changes API more robust. +* Send newly generated document rev to callers of an update function. 
+ +JavaScript Clients +^^^^^^^^^^^^^^^^^^ + +* Added tests for couch.js and jquery.couch.js +* Added changes handler to jquery.couch.js. +* Added cache busting to jquery.couch.js if the user agent is msie. +* Added support for multi-document-fetch (via _all_docs) to jquery.couch.js. +* Added attachment versioning to jquery.couch.js. +* Added option to control ensure_full_commit to jquery.couch.js. +* Added list functionality to jquery.couch.js. +* Fixed issues where bulkSave() wasn't sending a POST body. + +Log System +^^^^^^^^^^ + +* Log HEAD requests as HEAD, not GET. +* Keep massive JSON blobs out of the error log. +* Fixed a timeout issue. + +Replication System +^^^^^^^^^^^^^^^^^^ + +* Refactored various internal APIs related to attachment streaming. +* Fixed hanging replication. +* Fixed keepalive issue. + +Security +^^^^^^^^ + +* Added authentication redirect URL to log in clients. +* Fixed query parameter encoding issue in oauth.js. +* Made authentication timeout configurable. +* Temporary views are now admin-only resources. + +Storage System +^^^^^^^^^^^^^^ + +* Don't require a revpos for attachment stubs. +* Added checking to ensure when a revpos is sent with an attachment stub, + it's correct. +* Make file deletions async to avoid pauses during compaction and db + deletion. +* Fixed for wrong offset when writing headers and converting them to blocks, + only triggered when header is larger than 4k. +* Preserve _revs_limit and instance_start_time after compaction. + +Test Suite +^^^^^^^^^^ + +* Made the test suite overall more reliable. + +View Server +^^^^^^^^^^^ + +* Provide a UUID to update functions (and all other functions) that they can + use to create new docs. +* Upgrade CommonJS modules support to 1.1.1. +* Fixed erlang filter funs and normalize filter fun API. +* Fixed hang in view shutdown. + +URL Rewriter & Vhosts +^^^^^^^^^^^^^^^^^^^^^ + +* Allow more complex keys in rewriter. +* Allow global rewrites so system defaults are available in vhosts. 
+* Allow isolation of databases with vhosts. +* Fix issue with passing variables to query parameters. + + +Version 0.11.0 +-------------- + +Build and System Integration +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* Updated and improved source documentation. +* Fixed distribution preparation for building on Mac OS X. +* Added support for building a Windows installer as part of 'make dist'. +* Bug fix for building couch.app's module list. +* ETap tests are now run during make distcheck. This included a number of + updates to the build system to properly support VPATH builds. +* Gavin McDonald setup a build-bot instance. More info can be found at + http://ci.apache.org/buildbot.html + +Futon +^^^^^ + +* Added a button for view compaction. +* JSON strings are now displayed as-is in the document view, without the + escaping of new-lines and quotes. That dramatically improves readability of + multi-line strings. +* Same goes for editing of JSON string values. When a change to a field value is + submitted, and the value is not valid JSON it is assumed to be a string. This + improves editing of multi-line strings a lot. +* Hitting tab in textareas no longer moves focus to the next form field, but + simply inserts a tab character at the current caret position. +* Fixed some font declarations. + +HTTP Interface +^^^^^^^^^^^^^^ + +* Provide Content-MD5 header support for attachments. +* Added URL Rewriter handler. +* Added virtual host handling. + +Replication +^^^^^^^^^^^ + +* Added option to implicitly create replication target databases. +* Avoid leaking file descriptors on automatic replication restarts. +* Added option to replicate a list of documents by id. +* Allow continuous replication to be cancelled. + +Runtime Statistics +^^^^^^^^^^^^^^^^^^ + +* Statistics are now calculated for a moving window instead of non-overlapping + timeframes. +* Fixed a problem with statistics timers and system sleep. +* Moved statistic names to a term file in the priv directory. 
+
+Security
+^^^^^^^^
+
+* Fixed CVE-2010-0009: Apache CouchDB Timing Attack Vulnerability.
+* Added default cookie-authentication and users database.
+* Added Futon user interface for user signup and login.
+* Added per-database reader access control lists.
+* Added per-database security object for configuration data in validation
+  functions.
+* Added proxy authentication handler
+
+Storage System
+^^^^^^^^^^^^^^
+
+* Adds batching of multiple updating requests, to improve throughput with many
+  writers. Removed the now redundant couch_batch_save module.
+* Adds configurable compression of attachments.
+
+View Server
+^^^^^^^^^^^
+
+* Added optional 'raw' binary collation for faster view builds where Unicode
+  collation is not important.
+* Improved view index build time by reducing ICU collation callouts.
+* Improved view information objects.
+* Bug fix for partial updates during view builds.
+* Move query server to a design-doc based protocol.
+* Use json2.js for JSON serialization for compatibility with native JSON.
+* Major refactoring of couchjs to lay the groundwork for disabling cURL
+  support. The new HTTP interaction acts like a synchronous XHR. Example usage
+  of the new system is in the JavaScript CLI test runner.
+
+
+
+0.10.x Branch
+=============
+
+.. contents::
+    :depth: 1
+    :local:
+
+Version 0.10.1
+--------------
+
+Build and System Integration
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+* Test suite now works with the distcheck target.
+
+Replicator
+^^^^^^^^^^
+
+* Stability enhancements regarding redirects, timeouts, OAuth.
+
+Query Server
+^^^^^^^^^^^^
+
+* Avoid process leaks
+* Allow list and view to span languages
+
+Stats
+^^^^^
+
+* Eliminate new process flood on system wake
+
+
+Version 0.10.0
+--------------
+
+Build and System Integration
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+* Changed `couchdb` script configuration options.
+* Added default.d and local.d configuration directories to load sequence.
+ +HTTP Interface +^^^^^^^^^^^^^^ + +* Added optional cookie-based authentication handler. +* Added optional two-legged OAuth authentication handler. + +Storage Format +^^^^^^^^^^^^^^ + +* Add move headers with checksums to the end of database files for extra robust + storage and faster storage. + +View Server +^^^^^^^^^^^ + +* Added native Erlang views for high-performance applications. + +0.9.x Branch +============ + +.. contents:: + :depth: 1 + :local: + +Version 0.9.2 +------------- + +Build and System Integration +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* Remove branch callbacks to allow building couchjs against newer versions of + Spidermonkey. + +Replication +^^^^^^^^^^^ + +* Fix replication with 0.10 servers initiated by an 0.9 server (:issue:`559`). + + +Version 0.9.1 +------------- + +Build and System Integration +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* PID file directory is now created by the SysV/BSD daemon scripts. +* Fixed the environment variables shown by the configure script. +* Fixed the build instructions shown by the configure script. +* Updated ownership and permission advice in `README` for better security. + +Configuration and stats system +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* Corrected missing configuration file error message. +* Fixed incorrect recording of request time. + +Database Core +^^^^^^^^^^^^^ + +* Document validation for underscore prefixed variables. +* Made attachment storage less sparse. +* Fixed problems when a database with delayed commits pending is considered + idle, and subject to losing changes when shutdown. (:issue:`334`) + +External Handlers +^^^^^^^^^^^^^^^^^ + +* Fix POST requests. + +Futon +^^^^^ + +* Redirect when loading a deleted view URI from the cookie. + +HTTP Interface +^^^^^^^^^^^^^^ + +* Attachment requests respect the "rev" query-string parameter. + +JavaScript View Server +^^^^^^^^^^^^^^^^^^^^^^ + +* Useful JavaScript Error messages. 
+ +Replication +^^^^^^^^^^^ + +* Added support for Unicode characters transmitted as UTF-16 surrogate pairs. +* URL-encode attachment names when necessary. +* Pull specific revisions of an attachment, instead of just the latest one. +* Work around a rare chunk-merging problem in ibrowse. +* Work with documents containing Unicode characters outside the Basic + Multilingual Plane. + + +Version 0.9.0 +------------- + +Build and System Integration +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* The `couchdb` script now supports system chainable configuration files. +* The Mac OS X daemon script now redirects STDOUT and STDERR like SysV/BSD. +* The build and system integration have been improved for portability. +* Added COUCHDB_OPTIONS to etc/default/couchdb file. +* Remove COUCHDB_INI_FILE and COUCHDB_PID_FILE from etc/default/couchdb file. +* Updated `configure.ac` to manually link `libm` for portability. +* Updated `configure.ac` to extended default library paths. +* Removed inets configuration files. +* Added command line test runner. +* Created dev target for make. + +Configuration and stats system +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* Separate default and local configuration files. +* HTTP interface for configuration changes. +* Statistics framework with HTTP query API. + +Database Core +^^^^^^^^^^^^^ + +* Faster B-tree implementation. +* Changed internal JSON term format. +* Improvements to Erlang VM interactions under heavy load. +* User context and administrator role. +* Update validations with design document validation functions. +* Document purge functionality. +* Ref-counting for database file handles. + +Design Document Resource Paths +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* Added httpd_design_handlers config section. +* Moved _view to httpd_design_handlers. +* Added ability to render documents as non-JSON content-types with _show and + _list functions, which are also httpd_design_handlers. 
+ +Futon Utility Client +^^^^^^^^^^^^^^^^^^^^ + +* Added pagination to the database listing page. +* Implemented attachment uploading from the document page. +* Added page that shows the current configuration, and allows modification of + option values. +* Added a JSON "source view" for document display. +* JSON data in view rows is now syntax highlighted. +* Removed the use of an iframe for better integration with browser history and + bookmarking. +* Full database listing in the sidebar has been replaced by a short list of + recent databases. +* The view editor now allows selection of the view language if there is more + than one configured. +* Added links to go to the raw view or document URI. +* Added status page to display currently running tasks in CouchDB. +* JavaScript test suite split into multiple files. +* Pagination for reduce views. + +HTTP Interface +^^^^^^^^^^^^^^ + +* Added client side UUIDs for idempotent document creation +* HTTP COPY for documents +* Streaming of chunked attachment PUTs to disk +* Remove negative count feature +* Add include_docs option for view queries +* Add multi-key view post for views +* Query parameter validation +* Use stale=ok to request potentially cached view index +* External query handler module for full-text or other indexers. +* Etags for attachments, views, shows and lists +* Show and list functions for rendering documents and views as developer + controlled content-types. +* Attachment names may use slashes to allow uploading of nested directories + (useful for static web hosting). +* Option for a view to run over design documents. +* Added newline to JSON responses. Closes bike-shed. + +Replication +^^^^^^^^^^^ + +* Using ibrowse. +* Checkpoint replications so failures are less expensive. +* Automatically retry of failed replications. +* Stream attachments in pull-replication. + +0.8.x Branch +============ + +.. 
contents:: + :depth: 1 + :local: + +Version 0.8.1-incubating +------------------------ + +Build and System Integration +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* The `couchdb` script no longer uses `awk` for configuration checks as this + was causing portability problems. +* Updated `sudo` example in `README` to use the `-i` option, this fixes + problems when invoking from a directory the `couchdb` user cannot access. + +Database Core +^^^^^^^^^^^^^ + +* Fix for replication problems where the write queues can get backed up if the + writes aren't happening fast enough to keep up with the reads. For a large + replication, this can exhaust memory and crash, or slow down the machine + dramatically. The fix keeps only one document in the write queue at a time. +* Fix for databases sometimes incorrectly reporting that they contain 0 + documents after compaction. +* CouchDB now uses ibrowse instead of inets for its internal HTTP client + implementation. This means better replication stability. + +Futon +^^^^^ + +* The view selector dropdown should now work in Opera and Internet Explorer + even when it includes optgroups for design documents. (:issue:`81`) + +JavaScript View Server +^^^^^^^^^^^^^^^^^^^^^^ + +* Sealing of documents has been disabled due to an incompatibility with + SpiderMonkey 1.9. +* Improve error handling for undefined values emitted by map functions. + (:issue:`83`) + +HTTP Interface +^^^^^^^^^^^^^^ + +* Fix for chunked responses where chunks were always being split into multiple + TCP packets, which caused problems with the test suite under Safari, and in + some other cases. +* Fix for an invalid JSON response body being returned for some kinds of + views. (:issue:`84`) +* Fix for connections not getting closed after rejecting a chunked request. + (:issue:`55`) +* CouchDB can now be bound to IPv6 addresses. +* The HTTP `Server` header now contains the versions of CouchDB and Erlang. 
+ + +Version 0.8.0-incubating +------------------------ + +Build and System Integration +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* CouchDB can automatically respawn following a server crash. +* Database server no longer refuses to start with a stale PID file. +* System logrotate configuration provided. +* Improved handling of ICU shared libraries. +* The `couchdb` script now automatically enables SMP support in Erlang. +* The `couchdb` and `couchjs` scripts have been improved for portability. +* The build and system integration have been improved for portability. + +Database Core +^^^^^^^^^^^^^ + +* The view engine has been completely decoupled from the storage engine. Index + data is now stored in separate files, and the format of the main database + file has changed. +* Databases can now be compacted to reclaim space used for deleted documents + and old document revisions. +* Support for incremental map/reduce views has been added. +* To support map/reduce, the structure of design documents has changed. View + values are now JSON objects containing at least a `map` member, and + optionally a `reduce` member. +* View servers are now identified by name (for example `javascript`) instead of + by media type. +* Automatically generated document IDs are now based on proper UUID generation + using the crypto module. +* The field `content-type` in the JSON representation of attachments has been + renamed to `content_type` (underscore). + +Futon +^^^^^ + +* When adding a field to a document, Futon now just adds a field with an + autogenerated name instead of prompting for the name with a dialog. The name + is automatically put into edit mode so that it can be changed immediately. +* Fields are now sorted alphabetically by name when a document is displayed. +* Futon can be used to create and update permanent views. +* The maximum number of rows to display per page on the database page can now + be adjusted. 
+* Futon now uses the XMLHTTPRequest API asynchronously to communicate with the
+  CouchDB HTTP server, so that most operations no longer block the browser.
+* View results sorting can now be switched between ascending and descending by
+  clicking on the `Key` column header.
+* Fixed a bug where documents that contained a `@` character could not be
+  viewed. (:issue:`12`)
+* The database page now provides a `Compact` button to trigger database
+  compaction. (:issue:`38`)
+* Fixed potential double encoding of document IDs and other URI segments in
+  many instances. (:issue:`39`)
+* Improved display of attachments.
+* The JavaScript Shell has been removed due to unresolved licensing issues.
+
+JavaScript View Server
+^^^^^^^^^^^^^^^^^^^^^^
+
+* SpiderMonkey is no longer included with CouchDB, but rather treated as a
+  normal external dependency. A simple C program (`_couchjs`) is provided that
+  links against an existing SpiderMonkey installation and uses the interpreter
+  embedding API.
+* View functions using the default JavaScript view server can now do logging
+  using the global `log(message)` function. Log messages are directed into the
+  CouchDB log at `INFO` level. (:issue:`59`)
+* The global `map(key, value)` function made available to view code has been
+  renamed to `emit(key, value)`.
+* Fixed handling of exceptions raised by view functions.
+
+HTTP Interface
+^^^^^^^^^^^^^^
+
+* CouchDB now uses MochiWeb instead of inets for the HTTP server
+  implementation. Among other things, this means that the extra configuration
+  files needed for inets (such as `couch_httpd.conf`) are no longer used.
+* The HTTP interface now completely supports the `HEAD` method. (:issue:`3`)
+* Improved compliance of `Etag` handling with the HTTP specification.
+  (:issue:`13`)
+* Etags are no longer included in responses to document `GET` requests that
+  include query string parameters causing the JSON response to change without
+  the revision or the URI having changed.
+* The bulk document update API has changed slightly on both the request and the + response side. In addition, bulk updates are now atomic. +* CouchDB now uses `TCP_NODELAY` to fix performance problems with persistent + connections on some platforms due to nagling. +* Including a `?descending=false` query string parameter in requests to views + no longer raises an error. +* Requests to unknown top-level reserved URLs (anything with a leading + underscore) now return a `unknown_private_path` error instead of the + confusing `illegal_database_name`. +* The Temporary view handling now expects a JSON request body, where the JSON + is an object with at least a `map` member, and optional `reduce` and + `language` members. +* Temporary views no longer determine the view server based on the Content-Type + header of the `POST` request, but rather by looking for a `language` member + in the JSON body of the request. +* The status code of responses to `DELETE` requests is now 200 to reflect that + that the deletion is performed synchronously. diff -Nru couchdb-1.2.0/share/doc/build/html/_sources/changes.txt couchdb-1.4.0~rc.1/share/doc/build/html/_sources/changes.txt --- couchdb-1.2.0/share/doc/build/html/_sources/changes.txt 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/_sources/changes.txt 2013-08-23 10:57:21.000000000 -0400 @@ -0,0 +1,227 @@ +.. Licensed under the Apache License, Version 2.0 (the "License"); you may not +.. use this file except in compliance with the License. You may obtain a copy of +.. the License at +.. +.. http://www.apache.org/licenses/LICENSE-2.0 +.. +.. Unless required by applicable law or agreed to in writing, software +.. distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +.. WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +.. License for the specific language governing permissions and limitations under +.. the License. + +.. 
_changes: + +============ +Changes Feed +============ + +Polling +======= + +A list of changes made to documents in the database, in the order they were +made, can be obtained from the database's ``_changes`` resource. You can query +the ``_changes`` resource by issuing a ``GET`` request with the following +(optional) parameters: + ++--------------+----------------------------------------------+---------------+--------------+ +| Parameter | Value | Default Value | Notes | ++==============+==============================================+===============+==============+ +| since | seqnum / now | 0 | \(1) | ++--------------+----------------------------------------------+---------------+--------------+ +| limit | maxsequences | none | \(2) | ++--------------+----------------------------------------------+---------------+--------------+ +| descending | boolean | false | \(3) | ++--------------+----------------------------------------------+---------------+--------------+ +| feed | normal / longpoll / continuous / eventsource | normal | \(4) | ++--------------+----------------------------------------------+---------------+--------------+ +| heartbeat | milliseconds | 60000 | \(5) | ++--------------+----------------------------------------------+---------------+--------------+ +| timeout | milliseconds | 60000 | \(6) | ++--------------+----------------------------------------------+---------------+--------------+ +| filter | designdoc/filtername / _view | none | \(7) | ++--------------+----------------------------------------------+---------------+--------------+ +| include_docs | boolean | false | \(8) | ++--------------+----------------------------------------------+---------------+--------------+ +| style | all_docs / main_only | main_only | \(9) | ++--------------+----------------------------------------------+---------------+--------------+ +| view | designdoc/filtername | none | \(10) | 
++--------------+----------------------------------------------+---------------+--------------+ + +Notes: + +(1) Start the results from the change immediately after the given sequence + number. + +(2) Limit number of result rows to the specified value (note that using 0 here + has the same effect as 1). + +(3) Return the change results in descending sequence order (most recent change + first) + +(4) Select the type of feed. + +(5) Period in milliseconds after which an empty line is sent in the results. + Only applicable for `longpoll` or `continuous` feeds. Overrides any timeout + to keep the feed alive indefinitely. + +(6) Maximum period in milliseconds to wait for a change before the response is + sent, even if there are no results. Only applicable for `longpoll` or + `continuous` feeds. Note that 60000 is also the default maximum timeout to + prevent undetected dead connections. + + You can change the default maximum timeout in your ini-configuration: + + .. code-block:: ini + + [httpd] + changes_timeout=#millisecs + +(7) Reference to a :ref:`filter function ` from a design document + that will filter whole stream emitting only filtered events. + See the `section in the book`_ for more information. + +(8) Include the associated document with each result. If there are conflicts, + only the winning revision is returned. + +(9) Specifies how many revisions are returned in the changes array. + The default, `main_only`, will only return the current "winning" revision; + `all_docs` will return all leaf revisions (including conflicts and deleted + former conflicts.) + +(10) Allows to use view functions as filters. It requires to set ``filter`` + special value `_view` to enable this feature. Documents counted as "passed" + for view filter in case if map function emits at least one record for them. + +.. versionchanged:: 0.11.0 added ``include_docs`` parameter +.. versionchanged:: 1.2.0 added ``view`` parameter and special value `_view` + for ``filter`` one +.. 
versionchanged:: 1.3.0 ``since`` parameter could take `now` value to start + listen changes since current seq number. +.. versionchanged:: 1.3.0 ``eventsource`` feed type added. + +By default all changes are immediately returned as a JSON object:: + + GET /somedatabase/_changes HTTP/1.1 + +.. code-block:: javascript + + {"results":[ + {"seq":1,"id":"fresh","changes":[{"rev":"1-967a00dff5e02add41819138abb3284d"}]}, + {"seq":3,"id":"updated","changes":[{"rev":"2-7051cbe5c8faecd085a3fa619e6e6337"}]}, + {"seq":5,"id":"deleted","changes":[{"rev":"2-eec205a9d413992850a6e32678485900"}],"deleted":true} + ], + "last_seq":5} + +``results`` is the list of changes in sequential order. New and changed +documents only differ in the value of the rev; deleted documents include the +``"deleted": true`` attribute. (In the ``style=all_docs mode``, deleted applies +only to the current/winning revision. The other revisions listed might be +deleted even if there is no deleted property; you have to ``GET`` them +individually to make sure.) + +``last_seq`` is the sequence number of the last update returned. (Currently it +will always be the same as the seq of the last item in results.) + +Sending a ``since`` param in the query string skips all changes up to and +including the given sequence number:: + + GET /somedatabase/_changes?since=3 HTTP/1.1 + +.. code-block:: javascript + + {"results":[ + {"seq":5,"id":"deleted","changes":[{"rev":"2-eec205a9d413992850a6e32678485900"}],"deleted":true} + ], + "last_seq":5}  + +Long Polling +============ + +The `longpoll` feed (probably most useful used from a browser) is a more +efficient form of polling that waits for a change to occur before the response +is sent. `longpoll` avoids the need to frequently poll CouchDB to discover +nothing has changed! + +The response is basically the same JSON as is sent for the normal feed. + +A timeout limits the maximum length of time the connection is open. 
If there +are no changes before the timeout expires the response's results will be an +empty list.   + +Continuous +========== + +Polling the CouchDB server is not a good thing to do. Setting up new HTTP +connections just to tell the client that nothing happened puts unnecessary +strain on CouchDB. + +A continuous feed stays open and connected to the database until explicitly +closed and changes are sent to the client as they happen, i.e. in near +real-time. + +The continuous feed's response is a little different than the other feed types +to simplify the job of the client - each line of the response is either empty +or a JSON object representing a single change, as found in the normal feed's +results. + +.. code-block:: text + + GET /somedatabase/_changes?feed=continuous HTTP/1.1 + +.. code-block:: javascript + + {"seq":1,"id":"fresh","changes":[{"rev":"1-967a00dff5e02add41819138abb3284d"}]} + {"seq":3,"id":"updated","changes":[{"rev":"2-7051cbe5c8faecd085a3fa619e6e6337"}]} + {"seq":5,"id":"deleted","changes":[{"rev":"2-eec205a9d413992850a6e32678485900"}],"deleted":true} + ... tum tee tum ... + {"seq":6,"id":"updated","changes":[{"rev":"3-825cb35de44c433bfb2df415563a19de"}]} + +Obviously, `... tum tee tum ...` does not appear in the actual response, but +represents a long pause before the change with seq 6 occurred.   + +.. _section in the book: http://books.couchdb.org/relax/reference/change-notifications + +Event Source +============ + +The `eventsource` feed provides push notifications that can be consumed in +the form of DOM events in the browser. Refer to the `W3C eventsource +specification`_ for further details. CouchDB honors the ``Last-Event-ID`` header, +and if it's present it will take precedence over the ``since`` query parameter. + +.. code-block:: text + + GET /somedatabase/_changes?feed=eventsource HTTP/1.1 + +.. 
code-block:: javascript + + // define the event handling function + if (window.EventSource) { + + var source = new EventSource("/somedatabase/_changes?feed=eventsource"); + source.onerror = function(e) { + alert('EventSource failed.'); + }; + + var results = []; + var sourceListener = function(e) { + var data = JSON.parse(e.data); + results.push(data); + }; + + // start listening for events + source.addEventListener('message', sourceListener, false); + + // stop listening for events + source.removeEventListener('message', sourceListener, false); + + } + +.. note:: + + EventSource connections are subject to cross-origin resource sharing + restrictions. You might need to use the experimental :ref:`CORS support + ` to get the EventSource to work in your application. + +.. _W3C eventsource specification: http://www.w3.org/TR/eventsource/ diff -Nru couchdb-1.2.0/share/doc/build/html/_sources/config_reference.txt couchdb-1.4.0~rc.1/share/doc/build/html/_sources/config_reference.txt --- couchdb-1.2.0/share/doc/build/html/_sources/config_reference.txt 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/_sources/config_reference.txt 2013-08-23 10:57:21.000000000 -0400 @@ -0,0 +1,330 @@ +.. Licensed under the Apache License, Version 2.0 (the "License"); you may not +.. use this file except in compliance with the License. You may obtain a copy of +.. the License at +.. +.. http://www.apache.org/licenses/LICENSE-2.0 +.. +.. Unless required by applicable law or agreed to in writing, software +.. distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +.. WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +.. License for the specific language governing permissions and limitations under +.. the License. 
+ +Configuration Reference +======================= + + +Configuration Groups +-------------------- + ++----------------------------------+-------------------------------------------+ +| Section | Description | ++==================================+===========================================+ +| attachments | Attachment options | ++----------------------------------+-------------------------------------------+ +| couchdb | CouchDB specific options | ++----------------------------------+-------------------------------------------+ +| couch_httpd_auth | HTTPD Authentication options | ++----------------------------------+-------------------------------------------+ +| daemons | Daemons and background processes | ++----------------------------------+-------------------------------------------+ +| httpd | HTTPD Server options | ++----------------------------------+-------------------------------------------+ +| httpd_db_handlers | Database Operation handlers | ++----------------------------------+-------------------------------------------+ +| httpd_design_handlers | Handlers for design document operations | ++----------------------------------+-------------------------------------------+ +| httpd_global_handlers | Handlers for global operations | ++----------------------------------+-------------------------------------------+ +| log | Logging options | ++----------------------------------+-------------------------------------------+ +| query_servers | Query Server options | ++----------------------------------+-------------------------------------------+ +| query_server_config | Query server options | ++----------------------------------+-------------------------------------------+ +| replicator | Replicator Options | ++----------------------------------+-------------------------------------------+ +| ssl | SSL (Secure Sockets Layer) Options | ++----------------------------------+-------------------------------------------+ +| stats | Statistics options | 
++----------------------------------+-------------------------------------------+ +| uuids | UUID generation options | ++----------------------------------+-------------------------------------------+ +| cors | Cross Origin Resource Sharing settings | ++----------------------------------+-------------------------------------------+ + +attachments Configuration Options +--------------------------------- + ++--------------------------------------+---------------------------------------+ +| Option | Description | ++======================================+=======================================+ +| compressible_types | compressible_types | ++--------------------------------------+---------------------------------------+ +| compression_level | compression_level | ++--------------------------------------+---------------------------------------+ + +couchdb Configuration Options +----------------------------- + ++--------------------------------------+---------------------------------------+ +| Option | Description | ++======================================+=======================================+ +| database_dir | database_dir | ++--------------------------------------+---------------------------------------+ +| delayed_commits | delayed_commits | ++--------------------------------------+---------------------------------------+ +| max_attachment_chunk_size | max_attachment_chunk_size | ++--------------------------------------+---------------------------------------+ +| max_dbs_open | max_dbs_open | ++--------------------------------------+---------------------------------------+ +| max_document_size | max_document_size | ++--------------------------------------+---------------------------------------+ +| os_process_timeout | os_process_timeout | ++--------------------------------------+---------------------------------------+ +| uri_file | uri_file | ++--------------------------------------+---------------------------------------+ +| util_driver_dir | util_driver_dir | 
++--------------------------------------+---------------------------------------+ +| view_index_dir | view_index_dir | ++--------------------------------------+---------------------------------------+ + +daemons Configuration Options +----------------------------- + ++--------------------------------------+---------------------------------------+ +| Option | Description | ++======================================+=======================================+ +| auth_cache | auth_cache | ++--------------------------------------+---------------------------------------+ +| db_update_notifier | db_update_notifier | ++--------------------------------------+---------------------------------------+ +| external_manager | external_manager | ++--------------------------------------+---------------------------------------+ +| httpd | httpd | ++--------------------------------------+---------------------------------------+ +| httpsd | Enabled HTTPS service | ++--------------------------------------+---------------------------------------+ +| query_servers | query_servers | ++--------------------------------------+---------------------------------------+ +| stats_aggregator | stats_aggregator | ++--------------------------------------+---------------------------------------+ +| stats_collector | stats_collector | ++--------------------------------------+---------------------------------------+ +| uuids | uuids | ++--------------------------------------+---------------------------------------+ +| view_manager | view_manager | ++--------------------------------------+---------------------------------------+ + +httpd_db_handlers Configuration Options +--------------------------------------- + ++--------------------------------------+---------------------------------------+ +| Option | Description | ++======================================+=======================================+ +| _changes | _changes | ++--------------------------------------+---------------------------------------+ +| 
_compact | _compact | ++--------------------------------------+---------------------------------------+ +| _design | _design | ++--------------------------------------+---------------------------------------+ +| _temp_view | _temp_view | ++--------------------------------------+---------------------------------------+ +| _view_cleanup | _view_cleanup | ++--------------------------------------+---------------------------------------+ + +.. _config-couch_httpd_auth: + +couch_httpd_auth Configuration Options +-------------------------------------- + ++--------------------------------------+---------------------------------------+ +| Option | Description | ++======================================+=======================================+ +| auth_cache_size | auth_cache_size | ++--------------------------------------+---------------------------------------+ +| authentication_db | authentication_db | ++--------------------------------------+---------------------------------------+ +| authentication_redirect | authentication_redirect | ++--------------------------------------+---------------------------------------+ +| require_valid_user | require_valid_user | ++--------------------------------------+---------------------------------------+ +| timeout | timeout | ++--------------------------------------+---------------------------------------+ +| iterations | Password key derivation iterations | ++--------------------------------------+---------------------------------------+ +| users_db_public | Allow all users to view user documents| ++--------------------------------------+---------------------------------------+ +| public_fields | World-viewable user document fields | ++--------------------------------------+---------------------------------------+ + +.. note:: + Using the `public_fields` whitelist for user document properties requires + setting the `users_db_public` option to `true` (the latter option has no + other purpose). 
+ +httpd Configuration Options +--------------------------- + ++--------------------------------------+---------------------------------------+ +| Option | Description | ++======================================+=======================================+ +| allow_jsonp | allow_jsonp | ++--------------------------------------+---------------------------------------+ +| authentication_handlers | authentication_handlers | ++--------------------------------------+---------------------------------------+ +| bind_address | bind_address | ++--------------------------------------+---------------------------------------+ +| default_handler | default_handler | ++--------------------------------------+---------------------------------------+ +| max_connections | max_connections | ++--------------------------------------+---------------------------------------+ +| nodelay | Enable TCP_NODELAY | ++--------------------------------------+---------------------------------------+ +| port | port | ++--------------------------------------+---------------------------------------+ +| secure_rewrites | secure_rewrites | ++--------------------------------------+---------------------------------------+ +| vhost_global_handlers | vhost_global_handlers | ++--------------------------------------+---------------------------------------+ +| enable_cors | enables CORS functionality when true | ++--------------------------------------+---------------------------------------+ + +httpd_design_handlers Configuration Options +------------------------------------------- + ++--------------------------------------+---------------------------------------+ +| Option | Description | ++======================================+=======================================+ +| _info | _info | ++--------------------------------------+---------------------------------------+ +| _list | _list | ++--------------------------------------+---------------------------------------+ +| _rewrite | _rewrite | 
++--------------------------------------+---------------------------------------+ +| _show | _show | ++--------------------------------------+---------------------------------------+ +| _update | _update | ++--------------------------------------+---------------------------------------+ +| _view | _view | ++--------------------------------------+---------------------------------------+ + +httpd_global_handlers Configuration Options +------------------------------------------- + ++--------------------------------------+---------------------------------------+ +| Option | Description | ++======================================+=======================================+ +| / | / | ++--------------------------------------+---------------------------------------+ +| _active_tasks | _active_tasks | ++--------------------------------------+---------------------------------------+ +| _all_dbs | _all_dbs | ++--------------------------------------+---------------------------------------+ +| _config | _config | ++--------------------------------------+---------------------------------------+ +| _log | _log | ++--------------------------------------+---------------------------------------+ +| _oauth | _oauth | ++--------------------------------------+---------------------------------------+ +| _replicate | _replicate | ++--------------------------------------+---------------------------------------+ +| _restart | _restart | ++--------------------------------------+---------------------------------------+ +| _session | _session | ++--------------------------------------+---------------------------------------+ +| _stats | _stats | ++--------------------------------------+---------------------------------------+ +| _utils | _utils | ++--------------------------------------+---------------------------------------+ +| _uuids | _uuids | ++--------------------------------------+---------------------------------------+ +| favicon.ico | favicon.ico | 
++--------------------------------------+---------------------------------------+ + +log Configuration Options +------------------------- + ++--------------------------------------+---------------------------------------+ +| Option | Description | ++======================================+=======================================+ +| file | file | ++--------------------------------------+---------------------------------------+ +| include_sasl | include_sasl | ++--------------------------------------+---------------------------------------+ +| level | level | ++--------------------------------------+---------------------------------------+ + +query_servers Configuration Options +----------------------------------- + ++--------------------------------------+---------------------------------------+ +| Option | Description | ++======================================+=======================================+ +| javascript | javascript | ++--------------------------------------+---------------------------------------+ + +query_server_config Configuration Options +----------------------------------------- + ++--------------------------------------+---------------------------------------+ +| Option | Description | ++======================================+=======================================+ +| reduce_limit | reduce_limit | ++--------------------------------------+---------------------------------------+ + +replicator Configuration Options +-------------------------------- + ++--------------------------------------+---------------------------------------+ +| Option | Description | ++======================================+=======================================+ +| max_http_pipeline_size | max_http_pipeline_size | ++--------------------------------------+---------------------------------------+ +| max_http_sessions | max_http_sessions | ++--------------------------------------+---------------------------------------+ + +stats Configuration Options 
+--------------------------- + ++--------------------------------------+---------------------------------------+ +| Option | Description | ++======================================+=======================================+ +| rate | rate | ++--------------------------------------+---------------------------------------+ +| samples | samples | ++--------------------------------------+---------------------------------------+ + +uuids Configuration Options +--------------------------- + ++--------------------------------------+---------------------------------------+ +| Option | Description | ++======================================+=======================================+ +| algorithm | algorithm | ++--------------------------------------+---------------------------------------+ + + +cors Configuration Options +--------------------------- + ++--------------------------------------+---------------------------------------+ +| Option | Description | ++======================================+=======================================+ +| origins | List of origins, separated by a comma | +| | (protocol, host, optional port) | ++--------------------------------------+---------------------------------------+ +| methods | accepted HTTP methods | ++--------------------------------------+---------------------------------------+ +| credentials | `true` sends additional header | +| | Access-Control-Allow-Credentials=true | ++--------------------------------------+---------------------------------------+ + +Note that `credentials=true` and `origins=*` are mutually exclusive. 
+ +cors vhost Configuration +------------------------ + +The same configuration options for `cors` overall may be applied to an +individual vhost, within a specific section header, for `example.com` the +appropriate section would be: `[cors:http://example.com]` diff -Nru couchdb-1.2.0/share/doc/build/html/_sources/configuring.txt couchdb-1.4.0~rc.1/share/doc/build/html/_sources/configuring.txt --- couchdb-1.2.0/share/doc/build/html/_sources/configuring.txt 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/_sources/configuring.txt 2013-08-23 10:57:21.000000000 -0400 @@ -0,0 +1,629 @@ +.. Licensed under the Apache License, Version 2.0 (the "License"); you may not +.. use this file except in compliance with the License. You may obtain a copy of +.. the License at +.. +.. http://www.apache.org/licenses/LICENSE-2.0 +.. +.. Unless required by applicable law or agreed to in writing, software +.. distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +.. WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +.. License for the specific language governing permissions and limitations under +.. the License. + +.. _configuring: + +============= +Configuration +============= + +.. todo:: Configuring CouchDB + +CouchDB Configuration Files +=========================== + +.. todo:: CouchDB Configuration Files + +Configuration File Locations +============================ + +CouchDB reads files from the following locations, in the following +order. + +1. ``PREFIX/default.ini`` + +2. ``PREFIX/default.d/*`` + +3. ``PREFIX/local.ini`` + +4. ``PREFIX/local.d/*`` + +Settings in successive documents override the settings in earlier +entries. For example, setting the ``bind_address`` parameter in +``local.ini`` would override any setting in ``default.ini``. + +.. 
warning:: + The ``default.ini`` file may be overwritten during an upgrade or + re-installation, so localised changes should be made to the + ``local.ini`` file or files within the ``local.d`` directory. + +.. _update-notifications: + +Update Notifications +==================== + +.. todo:: Update Notifications + + +MochiWeb Server Options +======================= + +Server options for the MochiWeb component of CouchDB can be added to the +configuration files. Settings should be added to the ``server_options`` +option of the ``[httpd]`` section of ``local.ini``. For example: + +.. code-block:: ini + + [httpd] + server_options = [{backlog, 128}, {acceptor_pool_size, 16}] + +Socket Options Configuration Setting +==================================== + +The socket options for the listening socket in CouchDB can now be set +within the CouchDB configuration file. The setting should be added to +the ``[httpd]`` section of the file using the option name +``socket_options``. The specification is as a list of tuples. For +example: + +.. code-block:: ini + + [httpd] + socket_options = [{recbuf, 262144}, {sndbuf, 262144}, {nodelay, true}] + +The options supported are a subset of the full options supported by the +TCP/IP stack. A list of the supported options is provided in the +`Erlang inet`_ documentation. + +.. _Erlang inet: http://www.erlang.org/doc/man/inet.html#setopts-2 + +Virtual Hosts +============= + +CouchDB, since 0.11.0, can map requests to different locations based on +the ``Host`` header, even if they arrive on the same inbound IP address. + +This allows different virtual hosts on the same machine to map to different +databases or design documents, etc. The most common use case is to map a +virtual host to a Rewrite Handler, to provide full control over the +application's URIs. + +To add a virtual host, add a CNAME pointer to the DNS for your domain +name.
For development and testing, it is sufficient to add an entry in +the hosts file, typically `/etc/hosts`` on Unix-like operating systems: + +.. code-block:: bash + + # CouchDB vhost definitions, refer to local.ini for further details + 127.0.0.1 sofa.couchdb + +Test that this is working: + +.. code-block:: bash + + $ ping sofa.couchdb + PING sofa.couchdb (127.0.0.1) 56(84) bytes of data. + 64 bytes from localhost.localdomain (127.0.0.1): icmp_req=1 ttl=64 time=0.025 ms + 64 bytes from localhost.localdomain (127.0.0.1): icmp_req=2 ttl=64 time=0.051 ms + ^C + +Finally, add an entry to your :ref:`configuration file ` in the ``[vhosts]`` +section: + +.. code-block:: ini + + [vhosts] + sofa.couchdb:5984 = /sofa/_design/sofa/_rewrite + +If your CouchDB is listening on the default HTTP port, or is sitting +behind a proxy, then don't specify a port number in the vhost key. + +With the above setup, a request to ``http://sofa.couchdb:5984/sweet-o`` +will be mapped to +``http://127.0.0.1:5984/sofa/_design/sofa/_rewrite/sweet-o`` + +.. versionadded:: 0.11.0 added `vhosts` functionality + +HTTP Rewrite Handler +==================== + +Following on from `virtual hosts`_, CouchDB includes a custom URL rewriter. +All rewriting is done from ``/dbname/_design/ddocname/_rewrite`` by default. + +The rewriter is flexible, and can handle methods and custom query formats. + +Each rule should be in the ``rewrites`` top-level key of the design doc. +Example of a complete rule : + +.. code-block:: json + + { + .... + "rewrites": [ + { + "from": "", + "to": "index.html", + "method": "GET", + "query": {} + } + ] + } + + +**from**: is the path rule used to bind current uri to the rule. It +uses pattern matching for that. + +**to**: rule to rewrite an url. It can contain variables depending on +binding variables discovered during pattern matching and query args +(url args and from the query member.) + +**method**: method to bind the request method to the rule. 
If method +is missing, any method will be matched in the rewrite. + +**query**: optional query arguments, that may contain dynamic variables, +by binding keys in the to be used with the matching URL. + +``to`` and ``from`` are paths with patterns. The pattern can be strings starting +with ``:`` or ``*``, for example ``/somepath/:var/*``. + +The pattern matching is done by first matching the request method to a +rule. Then it will try to match the path to one specific rule. If no rule +match, then a 404 error is displayed. + +The path is converted into an erlang list, by regex splitting on ``/``. Each +variable is converted into an atom. The subsequent pattern matching step is +done by splitting ``/`` in the request url into a list of atoms. A string +pattern will match the equivalent token. The ``*`` atom will match any number +of tokens, but may only be present as the last pattern in the path. If all +tokens are matched, and all path terms have been consumed, then the overall +path specification matches. + +Once a matching ``from`` rule is found we rewrite the request url using the +``from``, ``to``, and ``query`` members. Each identified token will be reused +within the rule, and in the subsequent query if required. The identified +tokens are matched to the rule and will replace var. If ``*`` is found in +the rule it will contain any remaining suffix. + +The rewriter is re-entrant, and has a configurable recursion limit, set +by default at 100. + +Configuring Server Administrators +================================= + +A default CouchDB install provides admin-level access to all connecting users. +This configuration is known as ``Admin Party``, and is not recommended for +in-production usage. You can crash the party simply by creating the first +admin account. CouchDB server administrators and passwords are not stored +in the ``_users`` database, but in the ``local.ini`` file, which should be +appropriately secured and readable only by system administrators. + +.. 
code-block:: ini + + [admins] + ;admin = mysecretpassword + admin = -hashed-6d3c30241ba0aaa4e16c6ea99224f915687ed8cd,7f4a3e05e0cbc6f48a0035e3508eef90 + architect = -pbkdf2-43ecbd256a70a3a2f7de40d2374b6c3002918834,921a12f74df0c1052b3e562a23cd227f,10000 + +Administrators can be added directly to the ``[admins]`` section, and when +CouchDB is restarted, the passwords will be salted and encrypted. You may +also use the HTTP interface to create administrator accounts; this way, +you don't need to restart CouchDB, and there's no need to temporarily store +or transmit passwords in plaintext. The HTTP ``_config/admins`` endpoint +supports querying, deleting or creating new admin accounts: + +.. code-block:: bash + + shell> GET /_config/admins HTTP/1.1 + Accept: application/json + Host: localhost:5984 + + HTTP/1.1 200 OK + Cache-Control: must-revalidate + Content-Length: 196 + Content-Type: application/json + Date: Fri, 30 Nov 2012 11:37:18 GMT + Server: CouchDB/1.3.0 (Erlang OTP/R15B02) + +.. code-block:: json + + { + "admin": "-hashed-6d3c30241ba0aaa4e16c6ea99224f915687ed8cd,7f4a3e05e0cbc6f48a0035e3508eef90", + "architect": "-pbkdf2-43ecbd256a70a3a2f7de40d2374b6c3002918834,921a12f74df0c1052b3e562a23cd227f,10000" + } + +If you already have a salted, encrypted password string (for example, +from an old ``local.ini`` file, or from a different CouchDB server), then +you can store the "raw" encrypted string, without having CouchDB doubly +encrypt it. + +.. code-block:: bash + + shell> PUT /_config/admins/architect?raw=true HTTP/1.1 + Accept: application/json + Content-Type: application/json + Content-Length: 89 + Host: localhost:5984 + + "-pbkdf2-43ecbd256a70a3a2f7de40d2374b6c3002918834,921a12f74df0c1052b3e562a23cd227f,10000" + + HTTP/1.1 200 OK + Cache-Control: must-revalidate + Content-Length: 89 + Content-Type: application/json + Date: Fri, 30 Nov 2012 11:39:18 GMT + Server: CouchDB/1.3.0 (Erlang OTP/R15B02) + +.. 
code-block:: json + + "-pbkdf2-43ecbd256a70a3a2f7de40d2374b6c3002918834,921a12f74df0c1052b3e562a23cd227f,10000" + +Further details are available in ``security_``, including configuring the +work factor for ``PBKDF2``, and the algorithm itself at +`PBKDF2 (RFC-2898) <http://tools.ietf.org/html/rfc2898>`_. + +.. versionadded:: + 1.3.0 ``PBKDF2`` server-side hashed salted password support added, + now as a synchronous call for the ``_config/admins`` API. + +OS Daemons +========== + +CouchDB now supports starting external processes. The support is simple +and enables CouchDB to start each configured OS daemon. If the daemon +stops at any point, CouchDB will restart it (with protection to ensure +regularly failing daemons are not repeatedly restarted). + +The daemon starting process is one-to-one; for each configured +daemon in the configuration file, CouchDB will start exactly one +instance. If you need to run multiple instances, then you must create +separate individual configurations. Daemons are configured within the +``[os_daemons]`` section of your configuration file (``local.ini``). The +format of each configured daemon is: + +.. code-block:: ini + + NAME = PATH ARGS + +Where ``NAME`` is an arbitrary (and unique) name to identify the daemon; +``PATH`` is the full path to the daemon to be executed; ``ARGS`` are any +required arguments to the daemon. + +For example: + +.. code-block:: ini + + [os_daemons] + basic_responder = /usr/local/bin/responder.js + +There is no interactivity between CouchDB and the running process, but +you can use the OS Daemons service to create new HTTP servers and +responders and then use the new proxy service to redirect requests and +output to the CouchDB managed service. For more information on proxying, +see :ref:`http-proxying`. For further background on the OS Daemon service, see +`CouchDB Externals API`_. + +..
_CouchDB Externals API: http://davispj.com/2010/09/26/new-couchdb-externals-api.html + +Native SSL Support +================== + +CouchDB |version| supports SSL natively. All your secure connection needs can +now be served without needing to setup and maintain a separate proxy server +that handles SSL. + +SSL setup can be tricky, but the configuration in CouchDB was designed +to be as easy as possible. All you need is two files; a certificate and +a private key. If you bought an official SSL certificate from a +certificate authority, both should be in your possession already. + +If you just want to try this out and don't want to pay anything upfront, +you can create a self-signed certificate. Everything will work the same, +but clients will get a warning about an insecure certificate. + +You will need the OpenSSL command line tool installed. It probably +already is. + +:: + + shell> mkdir cert && cd cert + shell> openssl genrsa > privkey.pem + shell> openssl req -new -x509 -key privkey.pem -out mycert.pem -days 1095 + shell> ls + mycert.pem privkey.pem + +Now, you need to edit CouchDB's configuration, either by editing your +``local.ini`` file or using the ``/_config`` API calls or the +configuration screen in Futon. Here is what you need to do in +``local.ini``, you can infer what needs doing in the other places. + +Be sure to make these edits. Under ``[daemons]`` you should see: + +:: + + ; enable SSL support by uncommenting the following line and supply the PEM's below. 
+ ; the default ssl port CouchDB listens on is 6984 + ;httpsd = {couch_httpd, start_link, [https]} + +Here uncomment the last line: + +:: + + httpsd = {couch_httpd, start_link, [https]} + +Next, under ``[ssl]`` you will see: + +:: + + ;cert_file = /full/path/to/server_cert.pem + ;key_file = /full/path/to/server_key.pem + +Uncomment and adjust the paths so it matches your system's paths: + +:: + + cert_file = /home/jan/cert/mycert.pem + key_file = /home/jan/cert/privkey.pem + +For more information please read +`http://www.openssl.org/docs/HOWTO/certificates.txt`_. + +Now start (or restart) CouchDB. You should be able to connect to it +using HTTPS on port 6984: + +:: + + shell> curl https://127.0.0.1:6984/ + curl: (60) SSL certificate problem, verify that the CA cert is OK. Details: + error:14090086:SSL routines:SSL3_GET_SERVER_CERTIFICATE:certificate verify failed + More details here: http://curl.haxx.se/docs/sslcerts.html + + curl performs SSL certificate verification by default, using a "bundle" + of Certificate Authority (CA) public keys (CA certs). If the default + bundle file isn't adequate, you can specify an alternate file + using the --cacert option. + If this HTTPS server uses a certificate signed by a CA represented in + the bundle, the certificate verification probably failed due to a + problem with the certificate (it might be expired, or the name might + not match the domain name in the URL). + If you'd like to turn off curl's verification of the certificate, use + the -k (or --insecure) option. + +Oh no what happened?! — Remember, clients will notify their users that +your certificate is self signed. ``curl`` is the client in this case and +it notifies you. Luckily you trust yourself (don't you?) and you can +specify the ``-k`` option as the message reads: + +:: + + shell> curl -k https://127.0.0.1:6984/ + {"couchdb":"Welcome","version":"|version|"} + +All done. + +.. 
_`http://www.openssl.org/docs/HOWTO/certificates.txt`: http://www.openssl.org/docs/HOWTO/certificates.txt + +.. _http-proxying: + +HTTP Proxying +============= + +The HTTP proxy feature makes it easy to map and redirect different +content through your CouchDB URL. The proxy works by mapping a pathname +and passing all content after that prefix through to the configured +proxy address. + +Configuration of the proxy redirect is handled through the +``[httpd_global_handlers]`` section of the CouchDB configuration file +(typically ``local.ini``). The format is: + +.. code-block:: ini + + [httpd_global_handlers] + PREFIX = {couch_httpd_proxy, handle_proxy_req, <<"DESTINATION">>} + + +Where: + +- ``PREFIX`` + + Is the string that will be matched. The string can be any valid + qualifier, although to ensure that existing database names are not + overridden by a proxy configuration, you can use an underscore + prefix. + +- ``DESTINATION`` + + The fully-qualified URL to which the request should be sent. The + destination must include the ``http`` prefix. The content is used + verbatim in the original request, so you can also forward to servers + on different ports and to specific paths on the target host. + +The proxy process then translates requests of the form: + +.. code-block:: text + + http://couchdb:5984/PREFIX/path + +To: + +.. code-block:: text + + DESTINATION/path + +.. note:: + Everything after ``PREFIX`` including the required forward slash + will be appended to the ``DESTINATION``. + +The response is then communicated back to the original client. + +For example, the following configuration: + +.. code-block:: ini + + _google = {couch_httpd_proxy, handle_proxy_req, <<"http://www.google.com">>} + +Would forward all requests for ``http://couchdb:5984/_google`` to the +Google website. + +The service can also be used to forward to related CouchDB services, +such as Lucene: + +.. 
code-block:: ini + + [httpd_global_handlers] + _fti = {couch_httpd_proxy, handle_proxy_req, <<"http://127.0.0.1:5985">>} + +.. note:: + The proxy service is basic. If the request is not identified by the + ``DESTINATION``, or the remainder of the ``PATH`` specification is + incomplete, the original request URL is interpreted as if the + ``PREFIX`` component of that URL does not exist. + + For example, requesting ``http://couchdb:5984/_intranet/media`` when + ``/media`` on the proxy destination does not exist, will cause the + request URL to be interpreted as ``http://couchdb:5984/media``. Care + should be taken to ensure that both requested URLs and destination + URLs are able to cope. + +.. _cors: + +Cross-Origin Resource Sharing +============================= + +CORS, or "Cross-Origin Resource Sharing", allows a resource such as a web +page running JavaScript inside a browser, to make AJAX requests +(XMLHttpRequests) to a different domain, without compromising the security +of either party. + +A typical use case is to have a static website hosted on a CDN make +requests to another resource, such as a hosted CouchDB instance. This +avoids needing an intermediary proxy, using JSONP or similar workarounds +to retrieve and host content. + +While CouchDB's integrated HTTP server and support for document attachments +makes this less of a constraint for pure CouchDB projects, there are many +cases where separating the static content from the database access is +desirable, and CORS makes this very straightforward. + +By supporting CORS functionality, a CouchDB instance can accept direct +connections to protected databases and instances, without the browser +functionality being blocked due to same-origin constraints. CORS is +supported today on over 90% of recent browsers. + +CORS support is provided as experimental functionality in 1.3.0, and as such +will need to be enabled specifically in CouchDB's configuration. 
While all +origins are forbidden from making requests by default, support is available +for simple requests, preflight requests and per-vhost configuration. + +.. versionadded:: 1.3.0 + +Enabling CORS +------------- + +To enable CORS support, you need to set the ``enable_cors = true`` option +in the ``[httpd]`` section of ``local.ini``, and add a ``[cors]`` section +containing a ``origins = *`` setting. Note that by default, no origins are +accepted; you must either use a wildcard or whitelist. + +.. code-block:: ini + + [httpd] + enable_cors = true + + [cors] + origins = * + +Passing Credentials +------------------- + +By default, neither authentication headers nor cookies are included in +requests and responses. To do so requires both setting +`XmlHttpRequest.withCredentials = true` on the request object in the +browser and enabling credentials support in CouchDB. + +.. code-block:: ini + + [cors] + credentials = true + +CouchDB will respond to a credentials-enabled CORS request with an additional +header, `Access-Control-Allow-Credentials=true`. + +Tightening Access +----------------- + +Access can be restricted by protocol, host and optionally by port: + +.. code-block:: ini + + [cors] + ; List of origins, separated by a comma (protocol, host, optional port) + ; refer to http://tools.ietf.org/html/rfc6454 for specification + origins = http://localhost, https://localhost, http://www.number10.gov.uk:80 + +Specific HTTP methods may also be restricted: + +.. code-block:: ini + + [cors] + ; List of accepted methods, comma-separated + ; refer to http://tools.ietf.org/html/rfc2616, rfc2817, rfc5789 + methods = GET, POST, PUT, DELETE + +You can allow additional HTTP header fields to be allowed: + +.. code-block:: ini + + [cors] + ; List of accepted headers separated by a comma + headers = Authorization, Cookie + +Note that `Authorization` and `Cookie` are not part of the standard set. 
+ +Configuration per vhost +----------------------- + +All CORS-related settings may be configured on a per-vhost basis. For example, +the configuration section for `http://example.com/` would be contained in: + +.. code-block:: ini + + [cors:http://example.com] + credentials = false + origins = * + methods = GET, PUT, HEAD + +Useful References +----------------- + +- Original JIRA `implementation ticket `_ + +Standards and References: + +- IETF RFCs relating to methods `RFC 2616 `_, + `RFC 2817 `_, and + `RFC 5789 `_ +- IETF RFC 6454 for `Web Origins `_ +- W3C `CORS standard `_ + +Mozilla Developer Network Resources: + +- `Same origin policy for URIs `_ +- `HTTP Access Control `_ +- `Server-side Access Control `_ +- `Javascript same origin policy `_ + +Client-side CORS support and usage: + +- `CORS browser support matrix `_ +- `CORS tutorial `_ +- `Cross-Site XMLHttpRequests with CORS `_ diff -Nru couchdb-1.2.0/share/doc/build/html/_sources/contributing.txt couchdb-1.4.0~rc.1/share/doc/build/html/_sources/contributing.txt --- couchdb-1.2.0/share/doc/build/html/_sources/contributing.txt 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/_sources/contributing.txt 2013-08-23 10:57:21.000000000 -0400 @@ -0,0 +1,167 @@ +.. Licensed under the Apache License, Version 2.0 (the "License"); you may not +.. use this file except in compliance with the License. You may obtain a copy of +.. the License at +.. +.. http://www.apache.org/licenses/LICENSE-2.0 +.. +.. Unless required by applicable law or agreed to in writing, software +.. distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +.. WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +.. License for the specific language governing permissions and limitations under +.. the License. + +.. 
_contributing: + +================================== +Contributing to this Documentation +================================== + +The documentation lives in the CouchDB source tree. We'll start by forking and +cloning the CouchDB GitHub mirror. That will allow us to send the contribution +to CouchDB with a pull request. + +If you don't have a GitHub account yet, it is a good time to get one, they are +free. If you don't want to use GitHub, there are alternate ways of +contributing back, which we'll cover next time. + +Go to https://github.com/apache/couchdb and click the "fork" button in the top +right. This will create a fork of CouchDB in your GitHub account. Mine is +`janl`, so my fork lives at https://github.com/janl/couchdb. In the header, it +tells me my "GitHub Clone URL". We need to copy that and start a terminal: + +.. code-block:: bash + + $ git clone https://github.com/janl/couchdb.git + $ cd couchdb + $ subl . + +I'm opening the whole CouchDB source tree in my favourite editor. It gives +me the usual directory listing: + +.. code-block:: bash + + .git/ + .gitignore + .mailmap + .travis.yml + AUTHORS + BUGS + CHANGES + DEVELOPERS + INSTALL + INSTALL.Unix + INSTALL.Windows + LICENSE + Makefile.am + NEWS + NOTICE + README + THANKS.in + acinclude.m4.in + bin/ + bootstrap + build-aux/ + configure.ac + etc/ + license.skip + share/ + src/ + test/ + utils/ + var/ + +The documentation sources live in `share/doc/src`, you can safely ignore all +the other files and directories. + +First we should determine where we want to document this inside the +documentation. We can look through http://docs.couchdb.org/en/latest/ +for inspiration. The `JSON Structure Reference`_ looks like a fine place to write this up. + +.. 
_JSON Structure Reference: http://docs.couchdb.org/en/latest/json-structure.html + +The current state includes mostly tables describing the JSON structure (after +all, that's the title of this chapter), but some prose about the number +representation can't hurt. For future reference, since the topic in the thread +includes views and different encoding in views (as opposed to the storage +engine), we should remember to make a note in the views documentation as well, +but we'll leave this for later. + +Let's try and find the source file that builds the file +http://docs.couchdb.org/en/latest/json-structure.html -- we are in luck, under +`share/doc/src` we find the file `json-structure.rst`. That looks promising. +`.rst` stands for ReStructured Text (see +http://thomas-cokelaer.info/tutorials/sphinx/rest_syntax.html +for a markup reference), which is an ascii format for writing +documents, documentation in this case. Let's have a look and open it. + +We see ascii tables with some additional formatting, all looking like the +final HTML. So far so easy. For now, let's just add to the bottom of this. We +can worry about organising this better later. + +We start by adding a new headline:: + + Number Handling + =============== + +Now we paste in the rest of the main email of the thread. It is mostly text, +but it includes some code listings. Let's mark them up. We'll turn:: + + ejson:encode(ejson:decode(<<"1.1">>)). + <<"1.1000000000000000888">> + +Into:: + + .. code-block:: erlang + + ejson:encode(ejson:decode(<<"1.1">>)). + <<"1.1000000000000000888">> + +And we follow along with the other code samples. 
We turn:: + + Spidermonkey + + $ js -h 2>&1 | head -n 1 + JavaScript-C 1.8.5 2011-03-31 + $ js + js> JSON.stringify(JSON.parse("1.01234567890123456789012345678901234567890")) + "1.0123456789012346" + js> var f = JSON.stringify(JSON.parse("1.01234567890123456789012345678901234567890")) + js> JSON.stringify(JSON.parse(f)) + "1.0123456789012346" + +into:: + + Spidermonkey:: + + $ js -h 2>&1 | head -n 1 + JavaScript-C 1.8.5 2011-03-31 + $ js + js> JSON.stringify(JSON.parse("1.01234567890123456789012345678901234567890")) + "1.0123456789012346" + js> var f = JSON.stringify(JSON.parse("1.01234567890123456789012345678901234567890")) + js> JSON.stringify(JSON.parse(f)) + "1.0123456789012346" + +And then follow all the other ones. + +I cleaned up the text a little bit to make it sound more like a documentation +entry as opposed to a post on a mailing list. + +The next step would be to validate that we got all the markup right. I'll +leave this for later. For now we'll contribute our change back to CouchDB. + +First, we commit our changes:: + + $ git commit -am 'document number encoding' + [master a84b2cf] document number encoding + 1 file changed, 199 insertions(+) + +Then we push the commit to our CouchDB fork:: + + $ git push origin master + +Next, we go back to our GitHub page https://github.com/janl/couchdb and click +the "Pull Request" button. Fill in the description with something useful and +hit the "Send Pull Request" button. + +And we're done! diff -Nru couchdb-1.2.0/share/doc/build/html/_sources/ddocs.txt couchdb-1.4.0~rc.1/share/doc/build/html/_sources/ddocs.txt --- couchdb-1.2.0/share/doc/build/html/_sources/ddocs.txt 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/_sources/ddocs.txt 2013-08-23 10:57:21.000000000 -0400 @@ -0,0 +1,751 @@ +.. Licensed under the Apache License, Version 2.0 (the "License"); you may not +.. use this file except in compliance with the License. You may obtain a copy of +.. the License at +.. +.. 
http://www.apache.org/licenses/LICENSE-2.0 +.. +.. Unless required by applicable law or agreed to in writing, software +.. distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +.. WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +.. License for the specific language governing permissions and limitations under +.. the License. + +.. default-domain:: js + +.. _ddocs: + +=========== +Design Docs +=========== + +In this section we'll show how to write design documents, using the built-in +:ref:`JavaScript Query Server `. + +But before we start to write our first function, let's take a look at the list +of common objects that will be used during our code journey - we'll be using +them extensively within each function: + +- :ref:`Database information object ` +- :ref:`Request object ` +- :ref:`Response object ` +- :ref:`UserCtx object ` +- :ref:`Database Security object ` +- :ref:`Guide to JavaScript Query Server ` + +.. _viewfun: + +View functions +============== + +Views are the primary tool used for querying and reporting on CouchDB databases. + +.. _mapfun: + +Map functions +------------- + +.. function:: mapfun(doc) + + :param doc: Processed document object. + +Map functions accept a single document as the argument and (optionally) :func:`emit` +key/value pairs that are stored in a view. + +.. code-block:: javascript + + function (doc) { + if (doc.type === 'post' && doc.tags && Array.isArray(doc.tags)) { + doc.tags.forEach(function (tag) { + emit(tag.toLowerCase(), 1); + }); + } + } + +In this example a key/value pair is emitted for each value in the `tags` array +of a document with a `type` of "post". Note that :func:`emit` may be called many +times for a single document, so the same document may be available by several +different keys. + +Also keep in mind that each document is *sealed* to prevent situation when one +map function changes document state and the other one received a modified +version. 
+ +For efficiency reasons, documents are passed to a group of map functions - +each document is processed by a group of map functions from all views of +the related design document. This means that if you trigger an index update for one +view in a ddoc, all others will get updated too. + +Since the `1.1.0` release, `map` functions support +:ref:`CommonJS ` modules and access to the :func:`require` function. + +.. _reducefun: + +Reduce and rereduce functions +----------------------------- + +.. function:: redfun(keys, values[, rereduce]) + + :param keys: Array of pairs docid-key for related map function result. + Always ``null`` if rereduce is running (has ``true`` value). + :param values: Array of map function result values. + :param rereduce: Boolean sign of rereduce run. + + :return: Reduces `values` + +Reduce functions take two required arguments, the keys and values lists - the +result of the related map function - and an optional third one which indicates if +`rereduce` mode is active or not. `Rereduce` is used for an additional reduce of a +values list, so when it is ``true`` there is no information about the related `keys` +(the first argument is ``null``). + +Note that if the result produced by the `reduce` function is longer than the initial +values list, then a Query Server error will be raised. However, this behavior +can be disabled by setting the ``reduce_limit`` config option to ``false``: + +.. code-block:: ini + + [query_server_config] + reduce_limit = false + +While disabling ``reduce_limit`` might be useful for debugging purposes, remember +that the main task of reduce functions is to *reduce* the mapped result, not to make it +even bigger. Generally, your reduce function should converge rapidly to a single +value - which could be an array or similar object. + +Also CouchDB has three built-in reduce functions. These are implemented in +Erlang and run right inside CouchDB, so they are much faster than the equivalent +JavaScript functions: ``_sum``, ``_count`` and ``_stats``. 
Their equivalents in +JavaScript below: + +.. code-block:: javascript + + // could be replaced by _sum + function(keys, values){ + sum(values); + } + + // could be replaced by _count + function(keys, values, rereduce){ + if (rereduce) { + return sum(values); + } else { + return values.length; + } + } + + // could be replaced by _stats + function(keys, values, rereduce){ + return { + 'sum': sum(values), + 'min': Math.min.apply(null, values), + 'max': Math.max.apply(null, values), + 'count': values.length, + 'sumsqr': (function(){ + var sumsqr = 0; + + values.forEach(function (value) { + sumsqr += value * value; + }); + + return sumsqr; + })(), + } + } + +.. note:: **Why don't reduce functions support CommonJS modules?** + + While `map` functions have limited access to stored modules through + :func:`require` function there is no such feature for `reduce` functions. + The reason lies deep inside in mechanism how `map` and `reduce` functions + are processed by Query Server. Let's take a look on `map` functions first: + + #. CouchDB sends all `map` functions for processed design document to + Query Server. + #. Query Server handles them one by one, compiles and puts them onto an + internal stack. + #. After all `map` functions had been processed, CouchDB will send the + remaining documents to index one by one. + #. The Query Server receives the document object and applies it to every function + from the stack. The emitted results are then joined into a single array and sent + back to CouchDB. + + Now let's see how `reduce` functions are handled: + + #. CouchDB sends *as single command* list of available `reduce` functions + with result list of key-value pairs that was previously received as + result of `map` functions work. + #. Query Server compiles reduce functions and applies them to key-value + lists. Reduced result sends back to CouchDB. 
+ + As you may note, `reduce` functions been applied in single shot while + `map` ones are applied in an iterative way per each document. This means that + it's possible for `map` functions to precompile CommonJS libraries and use them + during the entire view processing, but for `reduce` functions it will be + compiled again and again for each view result reduction, which will lead to + performance degradation (`reduce` function are already does hard work to make + large result smaller). + + +.. _showfun: + +Show functions +============== + +.. function:: showfun(doc, req) + + :param doc: Processed document, may be omitted. + :param req: :ref:`Request object `. + + :return: :ref:`Response object ` + :rtype: object or string + +Show functions are used to represent documents in various formats, commonly as +HTML page with nicer formatting. They can also be used to run server-side functions +without requiring a pre-existing document. + +Basic example of show function could be: + +.. code-block:: javascript + + function(doc, req){ + if (doc) { + return "Hello from " + doc._id + "!"; + } else { + return "Hello, world!"; + } + } + +Also, there is more simple way to return json encoded data: + +.. code-block:: javascript + + function(doc, req){ + return { + 'json': { + 'id': doc['_id'], + 'rev': doc['_rev'] + } + } + } + + +and even files (this one is CouchDB logo): + +.. 
code-block:: javascript + + function(doc, req){ + return { + 'headers': { + 'Content-Type' : 'image/png', + }, + 'base64': ''.concat( + 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAMAAAAoLQ9TAAAAsV', + 'BMVEUAAAD////////////////////////5ur3rEBn////////////////wDBL/', + 'AADuBAe9EB3IEBz/7+//X1/qBQn2AgP/f3/ilpzsDxfpChDtDhXeCA76AQH/v7', + '/84eLyWV/uc3bJPEf/Dw/uw8bRWmP1h4zxSlD6YGHuQ0f6g4XyQkXvCA36MDH6', + 'wMH/z8/yAwX64ODeh47BHiv/Ly/20dLQLTj98PDXWmP/Pz//39/wGyJ7Iy9JAA', + 'AADHRSTlMAbw8vf08/bz+Pv19jK/W3AAAAg0lEQVR4Xp3LRQ4DQRBD0QqTm4Y5', + 'zMxw/4OleiJlHeUtv2X6RbNO1Uqj9g0RMCuQO0vBIg4vMFeOpCWIWmDOw82fZx', + 'vaND1c8OG4vrdOqD8YwgpDYDxRgkSm5rwu0nQVBJuMg++pLXZyr5jnc1BaH4GT', + 'LvEliY253nA3pVhQqdPt0f/erJkMGMB8xucAAAAASUVORK5CYII=') + } + } + +But what if you need to represent data in different formats via a single function? +Functions :func:`registerType` and :func:`provides` are your the best friends in +that question: + +.. code-block:: javascript + + function(doc, req){ + provides('json', function(){ + return {'json': doc} + }); + provides('html', function(){ + return '
' + toJSON(doc) + '
' + }) + provides('xml', function(){ + return { + 'headers': {'Content-Type': 'application/xml'}, + 'body' : ''.concat( + '\n', + '', + (function(){ + escape = function(s){ + return s.replace(/"/g, '"') + .replace(/>/g, '>') + .replace(/</g, '<') + .replace(/&/g, '&'); + }; + var content = ''; + for(var key in doc){ + if(!doc.hasOwnProperty(key)) continue; + var value = escape(toJSON(doc[key])); + var key = escape(key); + content += ''.concat( + '<' + key + '>', + value + '' + ) + } + return content; + })(), + '' + ) + } + }) + registerType('text-json', 'text/json') + provides('text-json', function(){ + return toJSON(doc); + }) + } + +This function may return `html`, `json` , `xml` or our custom `text json` format +representation of same document object with same processing rules. Probably, +the `xml` provider in our function needs more care to handle nested objects +correctly, and keys with invalid characters, but you've got the idea! + +.. seealso:: + + CouchDB Wiki: + - `Showing Documents `_ + + CouchDB Guide: + - `Show Functions `_ + + +.. _listfun: + +List functions +============== + +.. function:: listfun(head, req) + + :param head: :ref:`view_head_info_object` + :param req: :ref:`Request object `. + + :return: Last chunk. + :rtype: string + +While :ref:`showfun` are used to customize document presentation, :ref:`listfun` +are used for same purpose, but against :ref:`viewfun` results. + +The next list function formats view and represents it as a very simple HTML page: + +.. code-block:: javascript + + function(head, req){ + start({ + 'headers': { + 'Content-Type': 'text/html' + } + }); + send(''); + send('') + while(row = getRow()){ + send(''.concat( + '', + '', + '', + '', + '' + )); + } + send('
IDKeyValue
' + toJSON(row.id) + '' + toJSON(row.key) + '' + toJSON(row.value) + '
'); + } + +Templates and styles could obviously be used to present data in a nicer +fashion, but this is an excellent starting point. Note that you may also +use :func:`registerType` and :func:`provides` functions in the same +way as for :ref:`showfun`! + +.. seealso:: + + CouchDB Wiki: + - `Listing Views with CouchDB 0.10 and later `_ + + CouchDB Guide: + - `Transforming Views with List Functions `_ + + +.. _updatefun: + +Update functions +================ + +.. function:: updatefun(doc, req) + + :param doc: Update function target document. + :param req: :ref:`request_object` + + :returns: Two-element array: the first element is the (updated or new) + document, which is committed to the database. If the first element + is ``null`` no document will be committed to the database. + If you are updating an existing, it should already have an ``_id`` + set, and if you are creating a new document, make sure to set its + ``_id`` to something, either generated based on the input or the + ``req.uuid`` provided. The second element is the response that will + be sent back to the caller. + +Update handlers are functions that clients can request to invoke server-side +logic that will create or update a document. This feature allows a range of use +cases such as providing a server-side last modified timestamp, updating +individual fields in a document without first getting the latest revision, etc. + +When the request to an update handler includes a document ID in the URL, the +server will provide the function with the most recent version of that document. +You can provide any other values needed by the update handler function via the +``POST``/``PUT`` entity body or query string parameters of the request. + +The basic example that demonstrates all use-cases of update handlers below: + +.. 
code-block:: javascript + + function(doc, req){ + if (!doc){ + if ('id' in req){ + // create new document + return [{'_id': req['id']}, 'New World'] + } + // change nothing in database + return [null, 'Empty World'] + } + doc['world'] = 'hello'; + doc['edited_by'] = req['userCtx']['name'] + return [doc, 'Edited World!'] + } + +.. seealso:: + + CouchDB Wiki: + - `Document Update Handlers `_ + + +.. _filterfun: + +Filter functions +================ + +.. function:: filterfun(doc, req) + + :param doc: Processed document object. + :param req: :ref:`request_object` + :return: Boolean value: ``true`` means that `doc` passes the filter rules, + ``false`` means that it does not. + +Filter functions mostly act like :ref:`showfun` and :ref:`listfun`: they +format, or more correctly, they *filter* the :ref:`changes feed`. + +Classic filters +--------------- + +By default the changes feed emits all database document changes. But if you're +waiting for some special changes, processing all documents is inefficient. + +Filters are special design document functions that allow the changes feed to emit +only specific documents that pass filter rules. + +Let's assume that our database is a mailbox and we need to handle only new mail +(documents with status `new`) events. Assuming that, our filter function +will look like the following: + +.. code-block:: javascript + + function(doc, req){ + // we need only `mail` documents + if (doc.type != 'mail'){ + return false; + } + // we're interested only in `new` ones + if (doc.status != 'new'){ + return false; + } + return true; // passed! + } +  +Filter functions must return ``true`` if the document passed all the defined +rules. Now, if you apply this function to the changes feed it will emit only changes +about "new mails":: + + GET /somedatabase/_changes?filter=mailbox/new_mail HTTP/1.1 + +.. 
code-block:: javascript + + {"results":[ + {"seq":1,"id":"df8eca9da37dade42ee4d7aa3401f1dd","changes":[{"rev":"1-c2e0085a21d34fa1cecb6dc26a4ae657"}]}, + {"seq":7,"id":"df8eca9da37dade42ee4d7aa34024714","changes":[{"rev":"1-29d748a6e87b43db967fe338bcb08d74"}]}, + ], + "last_seq":27} + +Note, that ``last_seq`` number is 27, but we'd received only two records. +Seems like any other changes was about documents that hasn't passed our filter. + +Probably, we also need to filter changes feed of our mailbox not only by single +status value: we're also interested in statuses like "spam" to update +spam-filter heuristic rules, "outgoing" to let mail daemon actually send mails +and so on. Creating a lot of similar functions that actually does similar work +isn't good idea - so we need dynamic filter to go. + +If you have noted, filter functions takes second argument as +:ref:`request ` object - it allows to create dynamic filters +based on query parameters, :ref:`user context ` and more. + +The dynamic version of our filter now will be next: + +.. code-block:: javascript + + function(doc, req){ + // we need only `mail` documents + if (doc.type != 'mail'){ + return false; + } + // we're interested only in requested status + if (doc.status != req.query.status){ + return false; + } + return true; // passed! + } + +and now we have pass `status` query parameter in request to let filter match +only required documents:: + + GET /somedatabase/_changes?filter=mailbox/by_status&status=new HTTP/1.1 + +.. code-block:: javascript + + {"results":[ + {"seq":1,"id":"df8eca9da37dade42ee4d7aa3401f1dd","changes":[{"rev":"1-c2e0085a21d34fa1cecb6dc26a4ae657"}]}, + {"seq":7,"id":"df8eca9da37dade42ee4d7aa34024714","changes":[{"rev":"1-29d748a6e87b43db967fe338bcb08d74"}]}, + ], + "last_seq":27} + +and we can change filter behavior with easy:: + + GET /somedatabase/_changes?filter=mailbox/by_status&status=spam HTTP/1.1 + +.. 
code-block:: javascript + + {"results":[ + {"seq":11,"id":"8960e91220798fc9f9d29d24ed612e0d","changes":[{"rev":"3-cc6ff71af716ddc2ba114967025c0ee0"}]}, + ], + "last_seq":27} + + +Combining filters with `continuous` feed allows to create powerful event-driven +systems. + +View filters +------------ + +View filters are the same as above, with one small difference: they use +views `map` function instead to `filter` one to process the changes feed. Each +time when a key-value pair could be emitted, a change is returned. This allows +to avoid creating filter functions that are mostly does same works as views. + +To use them just specify `_view` value for ``filter`` parameter and +`designdoc/viewname` for ``view`` one:: + + GET /somedatabase/_changes?filter=_view&view=dname/viewname HTTP/1.1 + +.. note:: + + Since view filters uses `map` functions as filters, they can't show any + dynamic behavior since :ref:`request object` is not + available. + +.. seealso:: + + CouchDB Guide: + - `Guide to filter change notification `_ + + CouchDB Wiki: + - `Filtered replication `_ + + +.. _vdufun: + +Validate document update functions +================================== + +.. function:: validatefun(newDoc, oldDoc, userCtx, secObj) + + :param newDoc: New version of document that will be stored. + :param oldDoc: Previous version of document that is already stored. + :param userCtx: :ref:`userctx_object` + :param secObj: :ref:`security_object` + + :throws: ``forbidden`` error to gracefully prevent document storing. + :throws: ``unauthorized`` error to prevent storage and allow the user to + re-auth. + +A design document may contain a function named `validate_doc_update` +which can be used to prevent invalid or unauthorized document update requests +from being stored. 
The function is passed the new document from the update +request, the current document stored in the database, a :ref:`userctx_object` +containing information about the user writing the document (if present), and +a :ref:`security_object` with lists of database security roles. + +Validation functions typically examine the structure of the new document to +ensure that required fields are present and to verify that the requesting user +should be allowed to make changes to the document properties. For example, +an application may require that a user must be authenticated in order to create +a new document or that specific document fields be present when a document +is updated. The validation function can abort the pending document write +by throwing one of two error objects: + +.. code-block:: javascript + + // user is not authorized to make the change but may re-authenticate + throw({ unauthorized: 'Error message here.' }); + + // change is not allowed + throw({ forbidden: 'Error message here.' }); + +Document validation is optional, and each design document in the database may +have at most one validation function. When a write request is received for +a given database, the validation function in each design document in that +database is called in an unspecified order. If any of the validation functions +throw an error, the write will not succeed. + +**Example**: The ``_design/_auth`` ddoc from `_users` database uses a validation +function to ensure that documents contain some required fields and are only +modified by a user with the ``_admin`` role: + +.. 
code-block:: javascript + + function(newDoc, oldDoc, userCtx, secObj) { + if (newDoc._deleted === true) { + // allow deletes by admins and matching users + // without checking the other fields + if ((userCtx.roles.indexOf('_admin') !== -1) || + (userCtx.name == oldDoc.name)) { + return; + } else { + throw({forbidden: 'Only admins may delete other user docs.'}); + } + } + + if ((oldDoc && oldDoc.type !== 'user') || newDoc.type !== 'user') { + throw({forbidden : 'doc.type must be user'}); + } // we only allow user docs for now + + if (!newDoc.name) { + throw({forbidden: 'doc.name is required'}); + } + + if (!newDoc.roles) { + throw({forbidden: 'doc.roles must exist'}); + } + + if (!isArray(newDoc.roles)) { + throw({forbidden: 'doc.roles must be an array'}); + } + + if (newDoc._id !== ('org.couchdb.user:' + newDoc.name)) { + throw({ + forbidden: 'Doc ID must be of the form org.couchdb.user:name' + }); + } + + if (oldDoc) { // validate all updates + if (oldDoc.name !== newDoc.name) { + throw({forbidden: 'Usernames can not be changed.'}); + } + } + + if (newDoc.password_sha && !newDoc.salt) { + throw({ + forbidden: 'Users with password_sha must have a salt.' + + 'See /_utils/script/couch.js for example code.' + }); + } + + var is_server_or_database_admin = function(userCtx, secObj) { + // see if the user is a server admin + if(userCtx.roles.indexOf('_admin') !== -1) { + return true; // a server admin + } + + // see if the user a database admin specified by name + if(secObj && secObj.admins && secObj.admins.names) { + if(secObj.admins.names.indexOf(userCtx.name) !== -1) { + return true; // database admin + } + } + + // see if the user a database admin specified by role + if(secObj && secObj.admins && secObj.admins.roles) { + var db_roles = secObj.admins.roles; + for(var idx = 0; idx < userCtx.roles.length; idx++) { + var user_role = userCtx.roles[idx]; + if(db_roles.indexOf(user_role) !== -1) { + return true; // role matches! 
+ } + } + } + + return false; // default to no admin + } + + if (!is_server_or_database_admin(userCtx, secObj)) { + if (oldDoc) { // validate non-admin updates + if (userCtx.name !== newDoc.name) { + throw({ + forbidden: 'You may only update your own user document.' + }); + } + // validate role updates + var oldRoles = oldDoc.roles.sort(); + var newRoles = newDoc.roles.sort(); + + if (oldRoles.length !== newRoles.length) { + throw({forbidden: 'Only _admin may edit roles'}); + } + + for (var i = 0; i < oldRoles.length; i++) { + if (oldRoles[i] !== newRoles[i]) { + throw({forbidden: 'Only _admin may edit roles'}); + } + } + } else if (newDoc.roles.length > 0) { + throw({forbidden: 'Only _admin may set roles'}); + } + } + + // no system roles in users db + for (var i = 0; i < newDoc.roles.length; i++) { + if (newDoc.roles[i][0] === '_') { + throw({ + forbidden: + 'No system roles (starting with underscore) in users db.' + }); + } + } + + // no system names as names + if (newDoc.name[0] === '_') { + throw({forbidden: 'Username may not start with underscore.'}); + } + + var badUserNameChars = [':']; + + for (var i = 0; i < badUserNameChars.length; i++) { + if (newDoc.name.indexOf(badUserNameChars[i]) >= 0) { + throw({forbidden: 'Character `' + badUserNameChars[i] + + '` is not allowed in usernames.'}); + } + } + } + +.. note:: + + The ``return`` statement used only for function, it has no impact on + the validation process. + +.. seealso:: + + CouchDB Guide: + - `Validation Functions `_ + + CouchDB Wiki: + - `Document Update Validation `_ diff -Nru couchdb-1.2.0/share/doc/build/html/_sources/index.txt couchdb-1.4.0~rc.1/share/doc/build/html/_sources/index.txt --- couchdb-1.2.0/share/doc/build/html/_sources/index.txt 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/_sources/index.txt 2013-08-23 10:57:21.000000000 -0400 @@ -0,0 +1,46 @@ +.. Licensed under the Apache License, Version 2.0 (the "License"); you may not +.. 
use this file except in compliance with the License. You may obtain a copy of +.. the License at +.. +.. http://www.apache.org/licenses/LICENSE-2.0 +.. +.. Unless required by applicable law or agreed to in writing, software +.. distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +.. WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +.. License for the specific language governing permissions and limitations under +.. the License. + +Introduction +============ + +|Apache CouchDB(TM)|_ is a document database built for the web. + +If you would like to help document the project, please send a note to the +`developer mailing list `_. + +This is a work in progress. + +Contents +======== + +.. toctree:: + :maxdepth: 2 + :numbered: + + intro + api-basics + configuring + replication + replicator + ddocs + query-servers + changes + api/reference + json-structure + config_reference + contributing + changelog + +.. This is how you get a TM sign into a link. Haha. Seriously. +.. |Apache CouchDB(TM)| unicode:: Apache U+0020 CouchDB U+2122 +.. _Apache CouchDB(TM): http://couchdb.apache.org/ diff -Nru couchdb-1.2.0/share/doc/build/html/_sources/intro.txt couchdb-1.4.0~rc.1/share/doc/build/html/_sources/intro.txt --- couchdb-1.2.0/share/doc/build/html/_sources/intro.txt 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/_sources/intro.txt 2013-08-23 10:57:21.000000000 -0400 @@ -0,0 +1,309 @@ +.. Licensed under the Apache License, Version 2.0 (the "License"); you may not +.. use this file except in compliance with the License. You may obtain a copy of +.. the License at +.. +.. http://www.apache.org/licenses/LICENSE-2.0 +.. +.. Unless required by applicable law or agreed to in writing, software +.. distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +.. WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +.. 
License for the specific language governing permissions and limitations under +.. the License. + +============ +Introduction +============ + +There are two interfaces to CouchDB, the built-in Futon web-based +interface and the CouchDB API accessed through the HTTP REST interface. +The former is the simplest way to view and monitor your CouchDB +installation and perform a number of basic database and system +operations. More information on using the Futon interface can be found +in :ref:`using-futon`. + +The primary way to interact with the CouchDB API is to use a client +library or other interface that provides access to the underlying +functionality through your chosen language or platform. However, since +the API is supported through HTTP REST, you can interact with your +CouchDB with any solution that supports the HTTP protocol. + +There are a number of different tools that talk the HTTP protocol and +allow you to set and configure the necessary information. One tool for +this that allows for access from the command-line is ``curl``. See +:ref:`using-curl`. + +.. _using-futon: + +Using Futon +=========== + +Futon is a native web-based interface built into CouchDB. It provides a +basic interface to the majority of the functionality, including the +ability to create, update, delete and view documents and views, provides +access to the configuration parameters, and an interface for initiating +replication. + +The default view is the Overview page which provides you with a list of +the databases. The basic structure of the page is consistent regardless +of the section you are in. The main panel on the left provides the main +interface to the databases, configuration or replication systems. The +side panel on the right provides navigation to the main areas of Futon +interface: + +.. 
figure:: ../images/futon-overview.png + :align: center + :alt: Futon Overview + + Futon Overview + +The main sections are: + +- Overview + + The main overview page, which provides a list of the databases and + provides the interface for querying the database and creating and + updating documents. See :ref:`futon-management`. + +- Configuration + + An interface into the configuration of your CouchDB installation. The + interface allows you to edit the different configurable parameters. + For more details on configuration, see :ref:`configuring`. + +- Replicator + + An interface to the replication system, enabling you to initiate + replication between local and remote databases. See + :ref:`futon-replication`. + +- Status + + Displays a list of the running background tasks on the server. + Background tasks include view index building, compaction and + replication. The Status page is an interface to the + :ref:`Active Tasks ` API call. + +- Verify Installation + + The Verify Installation allows you to check whether all of the + components of your CouchDB installation are correctly installed. + +- Test Suite + + The Test Suite section allows you to run the built-in test suite. + This executes a number of test routines entirely within your browser + to test the API and functionality of your CouchDB installation. If + you select this page, you can run the tests by using the Run All + button. This will execute all the tests, which may take some time. + +.. _futon-management: + +Managing Databases and Documents +-------------------------------- + +You can manage databases and documents within Futon using the main +Overview section of the Futon interface. + +To create a new database, click the Create Database ELLIPSIS button. You +will be prompted for the database name, as shown in the figure below. + +.. 
figure:: ../images/futon-createdb.png + :align: center + :alt: Creating a Database + + Creating a Database + +Once you have created the database (or selected an existing one), you +will be shown a list of the current documents. If you create a new +document, or select an existing document, you will be presented with the +edit document display. + +Editing documents within Futon requires selecting the document and then +editing (and setting) the fields for the document individually before +saving the document back into the database. + +For example, the figure below shows the editor for a single document, a +newly created document with a single ID, the document ``_id`` field. + +.. figure:: ../images/futon-editdoc.png + :align: center + :alt: Editing a Document + + Editing a Document + +To add a field to the document: + +1. Click Add Field. + +2. In the fieldname box, enter the name of the field you want to create. + For example, “company”. + +3. Click the green tick next to the field name to confirm the field name + change. + +4. Double-click the corresponding Value cell. + +5. Enter a company name, for example “Example”. + +6. Click the green tick next to the field value to confirm the field + value. + +7. The document is still not saved as this point. You must explicitly + save the document by clicking the Save Document button at the top of + the page. This will save the document, and then display the new + document with the saved revision information (the ``_rev`` field). + + .. figure:: ../images/futon-editeddoc.png + :align: center + :alt: Edited Document + + Edited Document + +The same basic interface is used for all editing operations within Futon. +You *must* remember to save the individual element (fieldname, value) +using the green tick button, before then saving the document. + +.. 
_futon-replication: + +Configuring Replication +----------------------- + +When you click the Replicator option within the Tools menu you are +presented with the Replicator screen. This allows you to start +replication between two databases by filling in or select the +appropriate options within the form provided. + +.. figure:: ../images/futon-replform.png + :align: center + :alt: Replication Form + + Replication Form + +To start a replication process, either the select the local database or +enter a remote database name into the corresponding areas of the form. +Replication occurs from the database on the left to the database on the +right. + +If you are specifying a remote database name, you must specify the full +URL of the remote database (including the host, port number and database +name). If the remote instance requires authentication, you can specify +the username and password as part of the URL, for example +``http://username:pass@remotehost:5984/demo``. + +To enable continuous replication, click the Continuous checkbox. + +To start the replication process, click the Replicate button. The +replication process should start and will continue in the background. If +the replication process will take a long time, you can monitor the +status of the replication using the Status option under the Tools menu. + +Once replication has been completed, the page will show the information +returned when the replication process completes by the API. + +The Replicator tool is an interface to the underlying replication API. +For more information, see :ref:`replicate`. For more information on +replication, see :ref:`replication`. + +.. _using-curl: + +Using ``curl`` +============== + +The ``curl`` utility is a command line tool available on Unix, Linux, +Mac OS X and Windows and many other platforms. 
``curl`` provides easy +access to the HTTP protocol (among others) directly from the +command-line and is therefore an ideal way of interacting with CouchDB +over the HTTP REST API. + +For simple ``GET`` requests you can supply the URL of the request. For +example, to get the database information: + +.. code-block:: bash + + shell> curl http://127.0.0.1:5984 + +This returns the database information (formatted in the output below for +clarity): + +.. code-block:: json + + { + "couchdb" : "Welcome", + "version" : "|version|", + } + +.. note:: For some URLs, especially those that include special characters such + as ampersand, exclamation mark, or question mark, you should quote + the URL you are specifying on the command line. For example: + + .. code-block:: bash + + shell> curl 'http://couchdb:5984/_uuids?count=5' + +You can explicitly set the HTTP command using the ``-X`` command line +option. For example, when creating a database, you set the name of the +database in the URL you send using a PUT request: + +.. code-block:: bash + + shell> curl -X PUT http://127.0.0.1:5984/demo + {"ok":true} + +But to obtain the database information you use a ``GET`` request (with +the return information formatted for clarity): + +.. code-block:: bash + + shell> curl -X GET http://127.0.0.1:5984/demo + { + "compact_running" : false, + "doc_count" : 0, + "db_name" : "demo", + "purge_seq" : 0, + "committed_update_seq" : 0, + "doc_del_count" : 0, + "disk_format_version" : 5, + "update_seq" : 0, + "instance_start_time" : "1306421773496000", + "disk_size" : 79 + } + +For certain operations, you must specify the content type of request, +which you do by specifying the ``Content-Type`` header using the ``-H`` +command-line option: + +.. code-block:: bash + + shell> curl -H 'Content-Type: application/json' http://127.0.0.1:5984/_uuids + +You can also submit 'payload' data, that is, data in the body of the +HTTP request using the ``-d`` option. 
This is useful if you need to +submit JSON structures, for example document data, as part of the +request. For example, to submit a simple document to the ``demo`` +database: + +.. code-block:: bash + + shell> curl -H 'Content-Type: application/json' \ + -X POST http://127.0.0.1:5984/demo \ + -d '{"company": "Example, Inc."}' + {"ok":true,"id":"8843faaf0b831d364278331bc3001bd8", + "rev":"1-33b9fbce46930280dab37d672bbc8bb9"} + +In the above example, the argument after the ``-d`` option is the JSON +of the document we want to submit. + +The document can be accessed by using the automatically generated +document ID that was returned: + +.. code-block:: bash + + shell> curl -X GET http://127.0.0.1:5984/demo/8843faaf0b831d364278331bc3001bd8 + {"_id":"8843faaf0b831d364278331bc3001bd8", + "_rev":"1-33b9fbce46930280dab37d672bbc8bb9", + "company":"Example, Inc."} + +The API samples in the :ref:`api-basics` show the HTTP command, URL and any +payload information that needs to be submitted (and the expected return +value). All of these examples can be reproduced using ``curl`` with the +command-line examples shown above. diff -Nru couchdb-1.2.0/share/doc/build/html/_sources/json-structure.txt couchdb-1.4.0~rc.1/share/doc/build/html/_sources/json-structure.txt --- couchdb-1.2.0/share/doc/build/html/_sources/json-structure.txt 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/_sources/json-structure.txt 2013-08-23 10:57:21.000000000 -0400 @@ -0,0 +1,825 @@ +.. Licensed under the Apache License, Version 2.0 (the "License"); you may not +.. use this file except in compliance with the License. You may obtain a copy of +.. the License at +.. +.. http://www.apache.org/licenses/LICENSE-2.0 +.. +.. Unless required by applicable law or agreed to in writing, software +.. distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +.. WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +.. 
License for the specific language governing permissions and limitations under +.. the License. + +======================== +JSON Structure Reference +======================== + +The following appendix provides a quick reference to all the JSON structures +that you can supply to CouchDB, or get in return to requests. + +All Database Documents +====================== + ++--------------------------------+---------------------------------------------+ +| Field | Description | ++================================+=============================================+ +| total_rows | Number of documents in the database/view | ++--------------------------------+---------------------------------------------+ +| offset | Offset where the document list started | ++--------------------------------+---------------------------------------------+ +| update_seq (optional) | Current update sequence for the database | ++--------------------------------+---------------------------------------------+ +| rows [array] | Array of document object | ++--------------------------------+---------------------------------------------+ + +Bulk Document Response +====================== + ++--------------------------------+---------------------------------------------+ +| Field | Description | ++================================+=============================================+ +| docs [array] | Bulk Docs Returned Documents | ++--------------------------------+---------------------------------------------+ +| id | Document ID | ++--------------------------------+---------------------------------------------+ +| error | Error type | ++--------------------------------+---------------------------------------------+ +| reason | Error string with extended reason | ++--------------------------------+---------------------------------------------+ + +Bulk Documents +============== + ++--------------------------------+---------------------------------------------+ +| Field | Description | 
++================================+=============================================+ +| all_or_nothing (optional) | Sets the database commit mode to use | +| | all-or-nothing semantics | ++--------------------------------+---------------------------------------------+ +| docs [array] | Bulk Documents Document | ++--------------------------------+---------------------------------------------+ +| _id (optional) | Document ID | ++--------------------------------+---------------------------------------------+ +| _rev (optional) | Revision ID (when updating an existing | +| | document) | ++--------------------------------+---------------------------------------------+ +| _deleted (optional) | Whether the document should be deleted | ++--------------------------------+---------------------------------------------+ + +Changes information for a database +================================== + ++--------------------------------+---------------------------------------------+ +| Field | Description | ++================================+=============================================+ +| last_seq | Last change sequence number | ++--------------------------------+---------------------------------------------+ +| results [array] | Changes made to a database | ++--------------------------------+---------------------------------------------+ +| seq | Update sequence number | ++--------------------------------+---------------------------------------------+ +| id | Document ID | ++--------------------------------+---------------------------------------------+ +| changes [array] | List of changes, field-by-field, for this | +| | document | ++--------------------------------+---------------------------------------------+ + +CouchDB Document +================ + ++--------------------------------+---------------------------------------------+ +| Field | Description | ++================================+=============================================+ +| _id (optional) | Document ID | 
++--------------------------------+---------------------------------------------+ +| _rev (optional) | Revision ID (when updating an existing | +| | document) | ++--------------------------------+---------------------------------------------+ + +CouchDB Error Status +==================== + ++--------------------------------+---------------------------------------------+ +| Field | Description | ++================================+=============================================+ +| id | Document ID | ++--------------------------------+---------------------------------------------+ +| error | Error type | ++--------------------------------+---------------------------------------------+ +| reason | Error string with extended reason | ++--------------------------------+---------------------------------------------+ + +.. _dbinfo_object: + +CouchDB database information object +=================================== + ++--------------------------------+---------------------------------------------+ +| Field | Description | ++================================+=============================================+ +| db_name | The name of the database. | ++--------------------------------+---------------------------------------------+ +| committed_update_seq | The number of committed update. | ++--------------------------------+---------------------------------------------+ +| doc_count | A count of the documents in the specified | +| | database. | ++--------------------------------+---------------------------------------------+ +| doc_del_count | Number of deleted documents | ++--------------------------------+---------------------------------------------+ +| compact_running | Set to true if the database compaction | +| | routine is operating on this database. | ++--------------------------------+---------------------------------------------+ +| disk_format_version | The version of the physical format used for | +| | the data when it is stored on disk. 
| ++--------------------------------+---------------------------------------------+ +| disk_size | Size in bytes of the data as stored on the | +| | disk. Views indexes are not included in the | +| | calculation. | ++--------------------------------+---------------------------------------------+ +| instance_start_time | Timestamp of when the database was opened, | +| | expressed in microseconds since the epoch. | ++--------------------------------+---------------------------------------------+ +| purge_seq | The number of purge operations on the | +| | database. | ++--------------------------------+---------------------------------------------+ +| update_seq | The current number of updates to the | +| | database. | ++--------------------------------+---------------------------------------------+ + +Design Document +=============== + ++--------------------------------+---------------------------------------------+ +| Field | Description | ++================================+=============================================+ +| _id | Design Document ID | ++--------------------------------+---------------------------------------------+ +| _rev | Design Document Revision | ++--------------------------------+---------------------------------------------+ +| views | View | ++--------------------------------+---------------------------------------------+ +| viewname | View Definition | ++--------------------------------+---------------------------------------------+ +| map | Map Function for View | ++--------------------------------+---------------------------------------------+ +| reduce (optional) | Reduce Function for View | ++--------------------------------+---------------------------------------------+ + +Design Document Information +=========================== + ++--------------------------------+---------------------------------------------+ +| Field | Description | ++================================+=============================================+ +| name | Name/ID of 
Design Document | ++--------------------------------+---------------------------------------------+ +| view_index | View Index | ++--------------------------------+---------------------------------------------+ +| compact_running | Indicates whether a compaction routine is | +| | currently running on the view | ++--------------------------------+---------------------------------------------+ +| disk_size | Size in bytes of the view as stored on disk | ++--------------------------------+---------------------------------------------+ +| language | Language for the defined views | ++--------------------------------+---------------------------------------------+ +| purge_seq | The purge sequence that has been processed | ++--------------------------------+---------------------------------------------+ +| signature | MD5 signature of the views for the design | +| | document | ++--------------------------------+---------------------------------------------+ +| update_seq | The update sequence of the corresponding | +| | database that has been indexed | ++--------------------------------+---------------------------------------------+ +| updater_running | Indicates if the view is currently being | +| | updated | ++--------------------------------+---------------------------------------------+ +| waiting_clients | Number of clients waiting on views from this| +| | design document | ++--------------------------------+---------------------------------------------+ +| waiting_commit | Indicates if there are outstanding commits | +| | to the underlying database that need to | +| | processed | ++--------------------------------+---------------------------------------------+ + +Document with Attachments +========================= + ++--------------------------------+---------------------------------------------+ +| Field | Description | ++================================+=============================================+ +| _id (optional) | Document ID | 
++--------------------------------+---------------------------------------------+ +| _rev (optional) | Revision ID (when updating an existing | +| | document) | ++--------------------------------+---------------------------------------------+ +| _attachments (optional) | Document Attachment | ++--------------------------------+---------------------------------------------+ +| filename | Attachment information | ++--------------------------------+---------------------------------------------+ +| content_type | MIME Content type string | ++--------------------------------+---------------------------------------------+ +| data | File attachment content, Base64 encoded | ++--------------------------------+---------------------------------------------+ + +List of Active Tasks +==================== + ++--------------------------------+---------------------------------------------+ +| Field | Description | ++================================+=============================================+ +| tasks [array] | Active Task | ++--------------------------------+---------------------------------------------+ +| pid | Process ID | ++--------------------------------+---------------------------------------------+ +| status | Task status message | ++--------------------------------+---------------------------------------------+ +| task | Task name | ++--------------------------------+---------------------------------------------+ +| type | Operation Type | ++--------------------------------+---------------------------------------------+ + +.. 
_replication-settings: + +Replication Settings +==================== + ++--------------------------------+---------------------------------------------+ +| Field | Description | ++================================+=============================================+ +| source | Source database name or URL | ++--------------------------------+---------------------------------------------+ +| target | Target database name or URL | ++--------------------------------+---------------------------------------------+ +| create_target (optional) | Creates the target database | ++--------------------------------+---------------------------------------------+ +| continuous (optional) | Configure the replication to be continuous | ++--------------------------------+---------------------------------------------+ +| cancel (optional) | Cancels the replication | ++--------------------------------+---------------------------------------------+ +| doc_ids (optional) | Array of document IDs to be synchronized | ++--------------------------------+---------------------------------------------+ +| proxy (optional) | Address of a proxy server through which | +| | replication should occur | ++--------------------------------+---------------------------------------------+ +| since_seq (optional) | Sequence from which the replication should | +| | start | ++--------------------------------+---------------------------------------------+ +| filter (optional) | name of the filter function in the form of | +| | ddoc/myfilter | ++--------------------------------+---------------------------------------------+ +| query_params (optional) | query parameter that are passed to the | +| | filter function; value should be a document | +| | containing parameters as members | ++--------------------------------+---------------------------------------------+ + +.. 
_replication-status: + +Replication Status +================== + ++--------------------------------+---------------------------------------------+ +| Field | Description | ++================================+=============================================+ +| ok | Replication status | ++--------------------------------+---------------------------------------------+ +| session_id | Unique session ID | ++--------------------------------+---------------------------------------------+ +| source_last_seq | Last sequence number read from source | +| | database | ++--------------------------------+---------------------------------------------+ +| history [array] | Replication History | ++--------------------------------+---------------------------------------------+ +| session_id | Session ID for this replication operation | ++--------------------------------+---------------------------------------------+ +| recorded_seq | Last recorded sequence number | ++--------------------------------+---------------------------------------------+ +| docs_read | Number of documents read | ++--------------------------------+---------------------------------------------+ +| docs_written | Number of documents written to target | ++--------------------------------+---------------------------------------------+ +| doc_write_failures | Number of document write failures | ++--------------------------------+---------------------------------------------+ +| start_time | Date/Time replication operation started | ++--------------------------------+---------------------------------------------+ +| start_last_seq | First sequence number in changes stream | ++--------------------------------+---------------------------------------------+ +| end_time | Date/Time replication operation completed | ++--------------------------------+---------------------------------------------+ +| end_last_seq | Last sequence number in changes stream | 
++--------------------------------+---------------------------------------------+ +| missing_checked | Number of missing documents checked | ++--------------------------------+---------------------------------------------+ +| missing_found | Number of missing documents found | ++--------------------------------+---------------------------------------------+ + +.. _request_object: + +Request object +============== + ++--------------------------------+---------------------------------------------+ +| Field | Description | ++================================+=============================================+ +| body | Request body data as `string`. | +| | If request method is `GET` method contains | +| | this field contains ``"undefined"`` value, | +| | while if `DELETE` or `HEAD` value is ``""`` | +| | (empty string) | ++--------------------------------+---------------------------------------------+ +| cookie | Cookies `object`. | ++--------------------------------+---------------------------------------------+ +| form | Form data `object`. | +| | Contains decoded body as key-value pairs if | +| | `Content-Type` header was | +| | ``application/x-www-form-urlencoded``. | ++--------------------------------+---------------------------------------------+ +| headers | Request headers `object`. | ++--------------------------------+---------------------------------------------+ +| id | Requested document id `string` if it was | +| | specified or ``null`` otherwise. | ++--------------------------------+---------------------------------------------+ +| info | :ref:`Database information ` | ++--------------------------------+---------------------------------------------+ +| method | Request method as `string` or `array`. | +| | String value is method is one of: `HEAD`, | +| | `GET`, `POST`, `PUT`, `DELETE`, `OPTIONS`, | +| | and `TRACE`, otherwise it will be | +| | represented as array of char codes. 
| ++--------------------------------+---------------------------------------------+ +| path | List of requested path sections. | ++--------------------------------+---------------------------------------------+ +| peer | Request source IP address. | ++--------------------------------+---------------------------------------------+ +| query | URL query parameters `object`. | +| | Note that multiple keys not supported and | +| | last key value suppress others. | ++--------------------------------+---------------------------------------------+ +| requested_path | List of actual requested path section. | ++--------------------------------+---------------------------------------------+ +| raw_path | Raw requested path `string`. | ++--------------------------------+---------------------------------------------+ +| secObj | :ref:`security_object`. | ++--------------------------------+---------------------------------------------+ +| userCtx | :ref:`userctx_object`. | ++--------------------------------+---------------------------------------------+ +| uuid | Generated UUID by specified algorithm in | +| | config file. | ++--------------------------------+---------------------------------------------+ + +.. 
code-block:: javascript + + { + "body": "undefined", + "cookie": { + "AuthSession": "cm9vdDo1MDZBRjQzRjrfcuikzPRfAn-EA37FmjyfM8G8Lw", + "m": "3234" + }, + "form": {}, + "headers": { + "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", + "Accept-Charset": "ISO-8859-1,utf-8;q=0.7,*;q=0.3", + "Accept-Encoding": "gzip,deflate,sdch", + "Accept-Language": "en-US,en;q=0.8", + "Connection": "keep-alive", + "Cookie": "m=3234:t|3247:t|6493:t|6967:t|34e2:|18c3:t|2c69:t|5acb:t|ca3:t|c01:t|5e55:t|77cb:t|2a03:t|1d98:t|47ba:t|64b8:t|4a01:t; AuthSession=cm9vdDo1MDZBRjQzRjrfcuikzPRfAn-EA37FmjyfM8G8Lw", + "Host": "127.0.0.1:5984", + "User-Agent": "Mozilla/5.0 (Windows NT 5.2) AppleWebKit/535.7 (KHTML, like Gecko) Chrome/16.0.912.75 Safari/535.7" + }, + "id": "foo", + "info": { + "committed_update_seq": 2701412, + "compact_running": false, + "data_size": 7580843252, + "db_name": "mailbox", + "disk_format_version": 6, + "disk_size": 14325313673, + "doc_count": 2262757, + "doc_del_count": 560, + "instance_start_time": "1347601025628957", + "purge_seq": 0, + "update_seq": 2701412 + }, + "method": "GET", + "path": [ + "mailbox", + "_design", + "request", + "_show", + "dump", + "foo" + ], + "peer": "127.0.0.1", + "query": {}, + "raw_path": "/mailbox/_design/request/_show/dump/foo", + "requested_path": [ + "mailbox", + "_design", + "request", + "_show", + "dump", + "foo" + ], + "secObj": { + "admins": { + "names": [ + "Bob" + ], + "roles": [] + }, + "members": { + "names": [ + "Mike", + "Alice" + ], + "roles": [] + } + }, + "userCtx": { + "db": "mailbox", + "name": "Mike", + "roles": [ + "user" + ] + }, + "uuid": "3184f9d1ea934e1f81a24c71bde5c168" + } + + +.. _response_object: + +Response object +=============== + ++--------------------------------+---------------------------------------------+ +| Field | Description | ++================================+=============================================+ +| code | HTTP status code `number`. 
| ++--------------------------------+---------------------------------------------+ +| json | JSON encodable `object`. | +| | Implicitly sets `Content-Type` header as | +| | ``application/json``. | ++--------------------------------+---------------------------------------------+ +| body | Raw response text `string`. | +| | Implicitly sets `Content-Type` header as | +| | ``text/html; charset=utf-8``. | ++--------------------------------+---------------------------------------------+ +| base64 | Base64 encoded `string`. | +| | Implicitly sets `Content-Type` header as | +| | ``application/binary``. | ++--------------------------------+---------------------------------------------+ +| headers | Response headers `object`. | +| | `Content-Type` header from this object | +| | overrides any implicitly assigned one. | ++--------------------------------+---------------------------------------------+ +| stop | `boolean` signal to stop iteration over | +| | view result rows (for list functions only) | ++--------------------------------+---------------------------------------------+ + +.. warning:: + ``body``, ``base64`` and ``json`` object keys are overlaps each other and + the last wins. Since most realizations of key-value objects doesn't preserve + key order mixing them may create confusing situation. Try to use only one of + them. + +.. note:: + Any custom property makes CouchDB raise internal exception. + Also `Response object` could be a simple string value which would be + implicitly wrapped into ``{"body": ...}`` object. 
+ + +Returned CouchDB Document with Detailed Revision Info +===================================================== + ++--------------------------------+---------------------------------------------+ +| Field | Description | ++================================+=============================================+ +| _id (optional) | Document ID | ++--------------------------------+---------------------------------------------+ +| _rev (optional) | Revision ID (when updating an existing | +| | document) | ++--------------------------------+---------------------------------------------+ +| _revs_info [array] | CouchDB Document Extended Revision Info | ++--------------------------------+---------------------------------------------+ +| rev | Full revision string | ++--------------------------------+---------------------------------------------+ +| status | Status of the revision | ++--------------------------------+---------------------------------------------+ + +Returned CouchDB Document with Revision Info +============================================ + ++--------------------------------+---------------------------------------------+ +| Field | Description | ++================================+=============================================+ +| _id (optional) | Document ID | ++--------------------------------+---------------------------------------------+ +| _rev (optional) | Revision ID (when updating an existing | +| | document) | ++--------------------------------+---------------------------------------------+ +| _revisions | CouchDB Document Revisions | ++--------------------------------+---------------------------------------------+ +| ids [array] | Array of valid revision IDs, in reverse | +| | order (latest first) | ++--------------------------------+---------------------------------------------+ +| start | Prefix number for the latest revision | ++--------------------------------+---------------------------------------------+ + +Returned Document with Attachments 
+================================== + ++--------------------------------+---------------------------------------------+ +| Field | Description | ++================================+=============================================+ +| _id (optional) | Document ID | ++--------------------------------+---------------------------------------------+ +| _rev (optional) | Revision ID (when updating an existing | +| | document) | ++--------------------------------+---------------------------------------------+ +| _attachments (optional) | Document Attachment | ++--------------------------------+---------------------------------------------+ +| filename | Attachment | ++--------------------------------+---------------------------------------------+ +| stub | Indicates whether the attachment is a stub | ++--------------------------------+---------------------------------------------+ +| content_type | MIME Content type string | ++--------------------------------+---------------------------------------------+ +| length | Length (bytes) of the attachment data | ++--------------------------------+---------------------------------------------+ +| revpos | Revision where this attachment exists | ++--------------------------------+---------------------------------------------+ + +.. 
_security_object: + +Security Object +=============== + ++--------------------------------+---------------------------------------------+ +| Field | Description | ++================================+=============================================+ +| admins | Roles/Users with admin privileges | ++--------------------------------+---------------------------------------------+ +| roles [array] | List of roles with parent privilege | ++--------------------------------+---------------------------------------------+ +| users [array] | List of users with parent privilege | ++--------------------------------+---------------------------------------------+ +| readers | Roles/Users with reader privileges | ++--------------------------------+---------------------------------------------+ +| roles [array] | List of roles with parent privilege | ++--------------------------------+---------------------------------------------+ +| users [array] | List of users with parent privilege | ++--------------------------------+---------------------------------------------+ + +.. code-block:: javascript + + { + "admins": { + "names": [ + "Bob" + ], + "roles": [] + }, + "members": { + "names": [ + "Mike", + "Alice" + ], + "roles": [] + } + } + + +.. _userctx_object: + +User Context Object +=================== + ++--------------------------------+---------------------------------------------+ +| Field | Description | ++================================+=============================================+ +| db | Database name in context of provided | +| | operation. | ++--------------------------------+---------------------------------------------+ +| name | User name. | ++--------------------------------+---------------------------------------------+ +| roles | List of user roles. | ++--------------------------------+---------------------------------------------+ + +.. code-block:: javascript + + { + "db": "mailbox", + "name": null, + "roles": [ + "_admin" + ] + } + + +.. 
_view_head_info_object: + +View Head Information +===================== + ++--------------------------------+---------------------------------------------+ +| Field | Description | ++================================+=============================================+ +| total_rows | Number of documents in the view | ++--------------------------------+---------------------------------------------+ +| offset | Offset where the document list started | ++--------------------------------+---------------------------------------------+ + +.. code-block:: javascript + + { + "total_rows": 42, + "offset": 3 + } + +.. _number-handling: + +Number Handling +=============== + +Any numbers defined in JSON that contain a decimal point or exponent +will be passed through the Erlang VM's idea of the "double" data type. +Any numbers that are used in views will pass through the views idea of +a number (the common JavaScript case means even integers pass through +a double due to JavaScript's definition of a number). + +Consider this document that we write to CouchDB: + +.. code-block:: javascript + + { + "_id":"30b3b38cdbd9e3a587de9b8122000cff", + "number": 1.1 + } + +Now let’s read that document back from CouchDB: + +.. code-block:: javascript + + { + "_id":"30b3b38cdbd9e3a587de9b8122000cff", + "_rev":"1-f065cee7c3fd93aa50f6c97acde93030", + "number":1.1000000000000000888 + } + + +What happens is CouchDB is changing the textual representation of the +result of decoding what it was given into some numerical format. In most +cases this is an `IEEE 754`_ double precision floating point number which +is exactly what almost all other languages use as well. + +.. _IEEE 754: https://en.wikipedia.org/wiki/IEEE_754-2008 + +What CouchDB does a bit differently than other languages is that it +does not attempt to pretty print the resulting output to use the +shortest number of characters. For instance, this is why we have this +relationship: + +.. 
code-block:: erlang + + ejson:encode(ejson:decode(<<"1.1">>)). + <<"1.1000000000000000888">> + +What can be confusing here is that internally those two formats +decode into the same IEEE-754 representation. And more importantly, it +will decode into a fairly close representation when passed through all +major parsers that I know about. + +While we've only been discussing cases where the textual +representation changes, another important case is when an input value +is contains more precision than can actually represented in a double. +(You could argue that this case is actually "losing" data if you don't +accept that numbers are stored in doubles). + +Here's a log for a couple of the more common JSON libraries I happen +to have on my machine: + +Spidermonkey:: + + $ js -h 2>&1 | head -n 1 + JavaScript-C 1.8.5 2011-03-31 + $ js + js> JSON.stringify(JSON.parse("1.01234567890123456789012345678901234567890")) + "1.0123456789012346" + js> var f = JSON.stringify(JSON.parse("1.01234567890123456789012345678901234567890")) + js> JSON.stringify(JSON.parse(f)) + "1.0123456789012346" + +Node:: + + $ node -v + v0.6.15 + $ node + JSON.stringify(JSON.parse("1.01234567890123456789012345678901234567890")) + '1.0123456789012346' + var f = JSON.stringify(JSON.parse("1.01234567890123456789012345678901234567890")) + undefined + JSON.stringify(JSON.parse(f)) + '1.0123456789012346' + +Python:: + + $ python + Python 2.7.2 (default, Jun 20 2012, 16:23:33) + [GCC 4.2.1 Compatible Apple Clang 4.0 (tags/Apple/clang-418.0.60)] on darwin + Type "help", "copyright", "credits" or "license" for more information. 
+ import json + json.dumps(json.loads("1.01234567890123456789012345678901234567890")) + '1.0123456789012346' + f = json.dumps(json.loads("1.01234567890123456789012345678901234567890")) + json.dumps(json.loads(f)) + '1.0123456789012346' + +Ruby:: + + $ irb --version + irb 0.9.5(05/04/13) + require 'JSON' + => true + JSON.dump(JSON.load("[1.01234567890123456789012345678901234567890]")) + => "[1.01234567890123]" + f = JSON.dump(JSON.load("[1.01234567890123456789012345678901234567890]")) + => "[1.01234567890123]" + JSON.dump(JSON.load(f)) + => "[1.01234567890123]" + + +.. note:: A small aside on Ruby, it requires a top level object or array, so I just + wrapped the value. Should be obvious it doesn't affect the result of + parsing the number though. + + +Ejson (CouchDB's current parser) at CouchDB sha 168a663b:: + + $ ./utils/run -i + Erlang R14B04 (erts-5.8.5) [source] [64-bit] [smp:2:2] [rq:2] + [async-threads:4] [hipe] [kernel-poll:true] + + Eshell V5.8.5 (abort with ^G) + 1> ejson:encode(ejson:decode(<<"1.01234567890123456789012345678901234567890">>)). + <<"1.0123456789012346135">> + 2> F = ejson:encode(ejson:decode(<<"1.01234567890123456789012345678901234567890">>)). + <<"1.0123456789012346135">> + 3> ejson:encode(ejson:decode(F)). + <<"1.0123456789012346135">> + + +As you can see they all pretty much behave the same except for Ruby +actually does appear to be losing some precision over the other +libraries. + +The astute observer will notice that ejson (the CouchDB JSON library) +reported an extra three digits. While its tempting to think that this +is due to some internal difference, its just a more specific case of +the 1.1 input as described above. + +The important point to realize here is that a double can only hold a +finite number of values. What we're doing here is generating a string +that when passed through the "standard" floating point parsing +algorithms (ie, strtod) will result in the same bit pattern in memory +as we started with. 
Or, slightly different, the bytes in a JSON +serialized number are chosen such that they refer to a single specific +value that a double can represent. + +The important point to understand is that we're mapping from one +infinite set onto a finite set. An easy way to see this is by +reflecting on this:: + + 1.0 == 1.00 == 1.000 = 1.(infinite zeroes) + +Obviously a computer can't hold infinite bytes so we have to +decimate our infinitely sized set to a finite set that can be +represented concisely. + +The game that other JSON libraries are playing is merely: + +"How few characters do I have to use to select this specific value for a double" + +And that game has lots and lots of subtle details that are difficult +to duplicate in C without a significant amount of effort (it took +Python over a year to get it sorted with their fancy build systems +that automatically run on a number of different architectures). + +Hopefully we've shown that CouchDB is not doing anything "funky" by +changing input. Its behaving the same as any other common JSON library +does, its just not pretty printing its output. + +On the other hand, if you actually are in a position where an IEEE-754 +double is not a satisfactory datatype for your numbers, then the +answer as has been stated is to not pass your numbers through this +representation. In JSON this is accomplished by encoding them as a +string or by using integer types (although integer types can still +bite you if you use a platform that has a different integer +representation than normal, ie, JavaScript). + +Also, if anyone is really interested in changing this behavior, I'm +all ears for contributions to `jiffy`_ (which is theoretically going to +replace ejson when I get around to updating the build system). The +places I've looked for inspiration are TCL and Python. If you know a +decent implementation of this float printing algorithm give me a +holler. + +.. 
_jiffy: https://github.com/davisp/jiffy diff -Nru couchdb-1.2.0/share/doc/build/html/_sources/query-servers.txt couchdb-1.4.0~rc.1/share/doc/build/html/_sources/query-servers.txt --- couchdb-1.2.0/share/doc/build/html/_sources/query-servers.txt 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/_sources/query-servers.txt 2013-08-23 10:57:21.000000000 -0400 @@ -0,0 +1,436 @@ +.. Licensed under the Apache License, Version 2.0 (the "License"); you may not +.. use this file except in compliance with the License. You may obtain a copy of +.. the License at +.. +.. http://www.apache.org/licenses/LICENSE-2.0 +.. +.. Unless required by applicable law or agreed to in writing, software +.. distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +.. WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +.. License for the specific language governing permissions and limitations under +.. the License. + +.. default-domain:: js + +============= +Query Servers +============= + +.. _queryserver_js: + +JavaScript +========== + +.. note:: While every design function has access to all JavaScript objects, + the table below describes appropriate usage cases. For example, + you may use :func:`emit` in :ref:`listfun`, but :func:`getRow` is not permitted during :ref:`mapfun`. 
+ ++--------------------------------+---------------------------------------------+ +| JS Function | Reasonable to use in design doc functions | ++================================+=============================================+ +| :func:`emit` | :ref:`mapfun` | ++--------------------------------+---------------------------------------------+ +| :func:`getRow` | :ref:`listfun` | ++--------------------------------+---------------------------------------------+ +| :data:`JSON` | any | ++--------------------------------+---------------------------------------------+ +| :func:`isArray` | any | ++--------------------------------+---------------------------------------------+ +| :func:`log` | any | ++--------------------------------+---------------------------------------------+ +| :func:`provides` | :ref:`showfun`, :ref:`listfun` | ++--------------------------------+---------------------------------------------+ +| :func:`registerType` | :ref:`showfun`, :ref:`listfun` | ++--------------------------------+---------------------------------------------+ +| :func:`require` | any, except :ref:`reducefun` | ++--------------------------------+---------------------------------------------+ +| :func:`send` | :ref:`listfun` | ++--------------------------------+---------------------------------------------+ +| :func:`start` | :ref:`listfun` | ++--------------------------------+---------------------------------------------+ +| :func:`sum` | any | ++--------------------------------+---------------------------------------------+ +| :func:`toJSON` | any | ++--------------------------------+---------------------------------------------+ + +Design functions context +------------------------ + +Each design function executes in a special context of predefined objects, +modules and functions: + + +.. function:: emit(key, value) + + Emits a `key`-`value` pair for further processing by CouchDB after the map + function is done. 
+ + :param key: The view key + :param value: The `key`'s associated value + + .. code-block:: javascript + + function(doc){ + emit(doc._id, doc._rev); + } + + +.. function:: getRow() + + Extracts the next row from a related view result. + + :return: View result row + :rtype: object + + .. code-block:: javascript + + function(head, req){ + send('['); + row = getRow(); + if (row){ + send(toJSON(row)); + while(row = getRow()){ + send(','); + send(toJSON(row)); + } + } + return ']'; + } + + +.. data:: JSON + + `JSON2 `_ + object. + + +.. function:: isArray(obj) + + A helper function to check if the provided value is an `Array`. + + :param obj: Any Javascript value + :return: ``true`` if `obj` is `Array`-typed, ``false`` otherwise + :rtype: boolean + + +.. function:: log(message) + + Log a message to the CouchDB log (at the `INFO` level). + + :param message: Message to be logged + + .. code-block:: javascript + + function(doc){ + log('Procesing doc ' + doc['_id']); + emit(doc['_id'], null); + } + + After the map function has run, the following line can be found in CouchDB + logs (e.g. at `/var/log/couchdb/couch.log`): + + .. code-block:: text + + [Sat, 03 Nov 2012 17:38:02 GMT] [info] [<0.7543.0>] OS Process #Port<0.3289> Log :: Processing doc 8d300b86622d67953d102165dbe99467 + + +.. function:: provides(key, func) + + Registers callable handler for specified MIME key. + + :param key: MIME key previously defined by :func:`registerType` + :param func: MIME type handler + + +.. function:: registerType(key, *mimes) + + Registers list of MIME types by associated `key`. 
+ + :param key: MIME types + :param mimes: MIME types enumeration + + Predefined mappings (`key`-`array`): + + - **all**: ``*/*`` + - **text**: ``text/plain; charset=utf-8``, ``txt`` + - **html**: ``text/html; charset=utf-8`` + - **xhtml**: ``application/xhtml+xml``, ``xhtml`` + - **xml**: ``application/xml``, ``text/xml``, ``application/x-xml`` + - **js**: ``text/javascript``, ``application/javascript``, + ``application/x-javascript`` + - **css**: ``text/css`` + - **ics**: ``text/calendar`` + - **csv**: ``text/csv`` + - **rss**: ``application/rss+xml`` + - **atom**: ``application/atom+xml`` + - **yaml**: ``application/x-yaml``, ``text/yaml`` + - **multipart_form**: ``multipart/form-data`` + - **url_encoded_form**: ``application/x-www-form-urlencoded`` + - **json**: ``application/json``, ``text/x-json`` + + +.. function:: require(path) + + Loads CommonJS module by a specified `path`. The path should not start with + a slash. + + :param path: A CommonJS module path started from design document root + :return: Exported statements + + +.. function:: send(chunk) + + Sends a single string `chunk` in response. + + :param chunk: Text chunk + + .. code-block:: javascript + + function(head, req){ + send('Hello,'); + send(' '); + send('Couch'); + return ! + } + + +.. function:: start(init_resp) + + Initiates chunked response. As an option, a custom + :ref:`response ` object may be sent at this point. + For `list`-functions only! + + .. note:: + + list functions may set the `HTTP response code` and `headers` by calling + this function. This function must be called before :func:`send`, + :func:`getRow` or a `return` statement; otherwise, the query server will + implicitly call this function with the empty object (``{}``). + + .. code-block:: javascript + + function(head, req){ + start({ + "code": 302, + "headers": { + "Location": "http://couchdb.apache.org" + } + }); + return "Relax!"; + } + + +.. function:: sum(arr) + + Sum `arr`'s items. 
+ + :param arr: Array of numbers + :rtype: number + + +.. function:: toJSON(obj) + + Encodes `obj` to JSON string. This is an alias for the ``JSON.stringify`` + method. + + :param obj: JSON encodable object + :return: JSON string + +.. _commonjs: + +CommonJS Modules +---------------- + +Support for `CommonJS Modules `_ +(introduced in CouchDB 0.11.0) allows you to create modular design functions +without the need for duplication of functionality. + +Here's a CommonJS module that checks user permissions: + +.. code-block:: javascript + + function user_context(userctx, secobj) { + var is_admin = function() { + return userctx.indexOf('_admin') != -1; + } + return {'is_admin': is_admin} + } + + exports['user'] = user_context + +Each module has access to additional global variables: + +- **module** (`object`): Contains information about the stored module + + - **id** (`string`): The module id; a JSON path in ddoc context + - **current** (`code`): Compiled module code object + - **parent** (`object`): Parent frame + - **exports** (`object`): Export statements + +- **exports** (`object`): Shortcut to the ``module.exports`` object + +The CommonJS module can be added to a design document, like so: + +.. code-block:: javascript + + { + "views": { + "lib": { + "security": "function user_context(userctx, secobj) { ... }" + }, + "validate_doc_update": "function(newdoc, olddoc, userctx, secobj) { + user = require('lib/security').user(userctx, secobj); + return user.is_admin(); + }" + }, + "_id": "_design/test" + } + +Modules paths are relative to the design document's ``views`` object, but +modules can only be loaded from the object referenced via ``lib``. The +``lib`` structure can still be used for view functions as well, by simply +storing view functions at e.g. ``views.lib.map``, ``views.lib.reduce``, etc. + +.. _queryserver_erlang: + +Erlang +====== + +.. warning:: + + Unlike the JavaScript query server, the Erlang query server does not + run in a sandbox. 
This means Erlang code has full access to your OS, + filesystem and network, which may lead to security issues. While Erlang + functions are faster than JavaScript ones, you need to be careful + about running them, especially if they were written by someone else. + + +.. note:: + + Due to security restrictions, the Erlang query server is disabled by + default. To enable it, you need to edit your `local.ini` to include a + ``native_query_servers`` section: + + .. code-block:: ini + + [native_query_servers] + erlang = {couch_native_process, start_link, []} + + Don't forget to restart CouchDB after updating the configuration, and + use the ``language: "erlang"`` property in your Erlang design documents. + + +.. function:: Emit(Id, Value) + + Emits `key`-`value` pairs to view indexer process. + + .. code-block:: erlang + + fun({Doc}) -> + <> = proplists:get_value(<<"_rev">>, Doc, null), + V = proplists:get_value(<<"_id">>, Doc, null), + Emit(<>, V) + end. + + +.. function:: FoldRows(Fun, Acc) + + Helper to iterate over all rows in a list function. + + :param Fun: Function object. + :param Acc: The value previously returned by `Fun`. + + .. code-block:: erlang + + fun(Head, {Req}) -> + Fun = fun({Row}, Acc) -> + Id = couch_util:get_value(<<"id">>, Row), + Send(list_to_binary(io_lib:format("Previous doc id: ~p~n", [Acc]))), + Send(list_to_binary(io_lib:format("Current doc id: ~p~n", [Id]))), + {ok, Id} + end, + FoldRows(Fun, nil), + "" + end. + + +.. function:: GetRow() + + Retrieves the next row from a related view result. + + .. code-block:: erlang + + %% FoldRows background implementation. + %% https://git-wip-us.apache.org/repos/asf?p=couchdb.git;a=blob;f=src/couchdb/couch_native_process.erl;hb=HEAD#l368 + %% + foldrows(GetRow, ProcRow, Acc) -> + case GetRow() of + nil -> + {ok, Acc}; + Row -> + case (catch ProcRow(Row, Acc)) of + {ok, Acc2} -> + foldrows(GetRow, ProcRow, Acc2); + {stop, Acc2} -> + {ok, Acc2} + end + end. + +.. 
function:: Log(Msg) + + :param Msg: Log a message at the `INFO` level. + + .. code-block:: erlang + + fun({Doc}) -> + <> = proplists:get_value(<<"_rev">>, Doc, null), + V = proplists:get_value(<<"_id">>, Doc, null), + Log(lists:flatten(io_lib:format("Hello from ~s doc!", [V]))), + Emit(<>, V) + end. + + After the map function has run, the following line can be found in + CouchDB logs (e.g. at `/var/log/couchdb/couch.log`): + + .. code-block:: text + + [Sun, 04 Nov 2012 11:33:58 GMT] [info] [<0.9144.2>] Hello from 8d300b86622d67953d102165dbe99467 doc! + + +.. function:: Send(Chunk) + + Sends a single string `Chunk` in response. + + .. code-block:: erlang + + fun(Head, {Req}) -> + Send("Hello,"), + Send(" "), + Send("Couch"), + "!" + end. + + The function above produces the following response: + + .. code-block:: text + + Hello, Couch! + + +.. function:: Start(Headers) + + :param Headers: Proplist of :ref:`response object`. + + Initialize :ref:`listfun` response. At this point, response code and headers + may be defined. For example, this function redirects to the CouchDB web site: + + .. code-block:: erlang + + fun(Head, {Req}) -> + Start({[{<<"code">>, 302}, + {<<"headers">>, {[ + {<<"Location">>, <<"http://couchdb.apache.org">>}] + }} + ]}), + "Relax!" + end. diff -Nru couchdb-1.2.0/share/doc/build/html/_sources/replication.txt couchdb-1.4.0~rc.1/share/doc/build/html/_sources/replication.txt --- couchdb-1.2.0/share/doc/build/html/_sources/replication.txt 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/_sources/replication.txt 2013-08-23 10:57:21.000000000 -0400 @@ -0,0 +1,95 @@ +.. Licensed under the Apache License, Version 2.0 (the "License"); you may not +.. use this file except in compliance with the License. You may obtain a copy of +.. the License at +.. +.. http://www.apache.org/licenses/LICENSE-2.0 +.. +.. Unless required by applicable law or agreed to in writing, software +.. 
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +.. WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +.. License for the specific language governing permissions and limitations under +.. the License. + +.. _replication: + +Replication +=========== + +One of CouchDB's strengths is the ability to synchronize two copies of the same +database. This enables users to distribute data across several nodes or +datacenters, but also to move data more closely to clients. + +Replication involves a source and a destination database, which can be one the +same or on different CouchDB instances. The aim of the replication is that at +the end of the process, all active documents on the source database are also in +the destination database and all documents that were deleted in the source +databases are also deleted on the destination database (if they even existed). + + +Triggering Replication +---------------------- + +Replication is controlled through documents in the :ref:`replicator`, where +each document describes one replication process (see +:ref:`replication-settings`). + +A replication is triggered by storing a replication document in the replicator +database. Its status can be inspected through the active tasks API (see +:ref:`active-tasks` and :ref:`replication-status`). A replication can be +stopped by deleting the document, or by updating it with its `cancel` property +set to `true`. + + +Replication Procedure +--------------------- + +During replication, CouchDB will compare the source and the destination +database to determine which documents differ between the source and the +destination database. It does so by following the :ref:`changes` on the source +and comparing the documents to the destination. Changes are submitted to the +destination in batches where they can introduce conflicts. Documents that +already exist on the destination in the same revision are not transferred. 
As +the deletion of documents is represented by a new revision, a document deleted +on the source will also be deleted on the target. + +A replication task will finish once it reaches the end of the changes feed. If +its `continuous` property is set to true, it will wait for new changes to +appear until the task is cancelled. Replication tasks also create checkpoint +documents on the destination to ensure that a restarted task can continue from +where it stopped, for example after it has crashed. + +When a replication task is initiated on the sending node, it is called *push* +replication, if it is initiated by the receiving node, it is called *pull* +replication. + + +Master - Master replication +--------------------------- + +One replication task will only transfer changes in one direction. To achieve +master-master replication it is possible to set up two replication tasks in +different directions. When a change is replication from database A to B by the +first task, the second will discover that the new change on B already exists in +A and will wait for further changes. + + +Controlling which Documents to Replicate +---------------------------------------- + +There are two ways for controlling which documents are replicated, and which +are skipped. *Local* documents are never replicated (see :ref:`api-local`). + +Additionally, :ref:`filterfun` can be used in a replication documents (see +:ref:`replication-settings`). The replication task will then evaluate +the filter function for each document in the changes feed. The document will +only be replicated if the filter returns `true`. + + +Migrating Data to Clients +------------------------- + +Replication can be especially useful for bringing data closer to clients. +`PouchDB `_ implements the replication algorithm of CouchDB +in JavaScript, making it possible to make data from a CouchDB database +available in an offline browser application, and synchronize changes back to +CouchDB. 
diff -Nru couchdb-1.2.0/share/doc/build/html/_sources/replicator.txt couchdb-1.4.0~rc.1/share/doc/build/html/_sources/replicator.txt --- couchdb-1.2.0/share/doc/build/html/_sources/replicator.txt 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/_sources/replicator.txt 2013-08-23 10:57:21.000000000 -0400 @@ -0,0 +1,383 @@ +.. Licensed under the Apache License, Version 2.0 (the "License"); you may not +.. use this file except in compliance with the License. You may obtain a copy of +.. the License at +.. +.. http://www.apache.org/licenses/LICENSE-2.0 +.. +.. Unless required by applicable law or agreed to in writing, software +.. distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +.. WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +.. License for the specific language governing permissions and limitations under +.. the License. + +.. _replicator: + +Replicator Database +=================== + +A database where you ``PUT``/``POST`` documents to trigger replications +and you ``DELETE`` to cancel ongoing replications. These documents have +exactly the same content as the JSON objects we used to ``POST`` to +``_replicate`` (fields ``source``, ``target``, ``create_target``, +``continuous``, ``doc_ids``, ``filter``, ``query_params``. + +Replication documents can have a user defined ``_id``. Design documents +(and ``_local`` documents) added to the replicator database are ignored. + +The default name of this database is ``_replicator``. The name can be +changed in the ``local.ini`` configuration, section ``[replicator]``, +parameter ``db``. + +Basics +------ + +Let's say you PUT the following document into ``_replicator``: + +.. code-block:: javascript + + { + "_id": "my_rep", + "source": "http://myserver.com:5984/foo", + "target": "bar", + "create_target": true + } + +In the couch log you'll see 2 entries like these: + +.. 
code-block:: text + + [Thu, 17 Feb 2011 19:43:59 GMT] [info] [<0.291.0>] Document `my_rep` triggered replication `c0ebe9256695ff083347cbf95f93e280+create_target` + [Thu, 17 Feb 2011 19:44:37 GMT] [info] [<0.124.0>] Replication `c0ebe9256695ff083347cbf95f93e280+create_target` finished (triggered by document `my_rep`) + +As soon as the replication is triggered, the document will be updated by +CouchDB with 3 new fields: + +.. code-block:: javascript + + { + "_id": "my_rep", + "source": "http://myserver.com:5984/foo", + "target": "bar", + "create_target": true, + "_replication_id": "c0ebe9256695ff083347cbf95f93e280", + "_replication_state": "triggered", + "_replication_state_time": 1297974122 + } + +Special fields set by the replicator start with the prefix +``_replication_``. + +- ``_replication_id`` + + The ID internally assigned to the replication. This is also the ID + exposed by ``/_active_tasks``. + +- ``_replication_state`` + + The current state of the replication. + +- ``_replication_state_time`` + + A Unix timestamp (number of seconds since 1 Jan 1970) that tells us + when the current replication state (marked in ``_replication_state``) + was set. + +When the replication finishes, it will update the ``_replication_state`` +field (and ``_replication_state_time``) with the value ``completed``, so +the document will look like: + +.. code-block:: javascript + + { + "_id": "my_rep", + "source": "http://myserver.com:5984/foo", + "target": "bar", + "create_target": true, + "_replication_id": "c0ebe9256695ff083347cbf95f93e280", + "_replication_state": "completed", + "_replication_state_time": 1297974122 + } + +When an error happens during replication, the ``_replication_state`` +field is set to ``error`` (and ``_replication_state_time`` gets updated of +course). 
Let's suppose 2 documents are added to the ``_replicator`` database in
Note that both documents get exactly the same value for the
From now on you have a single replication going on in your system: a
Now you change back the +replicator database to the original one ``_replicator``: + +:: + + $ curl -X PUT http://localhost:5984/_config/replicator/db -d '"_replicator"' + "another_replicator_db" + +Immediately after this operation, the replication pulling from server X +will be stopped and the replications defined in the ``_replicator`` +database (pulling from servers A and B) will be resumed. + +Changing again the replicator database to ``another_replicator_db`` will +stop the pull replications pulling from servers A and B, and resume the +pull replication pulling from server X. + +Replicating the replicator database +----------------------------------- + +Imagine you have in server C a replicator database with the two +following pull replication documents in it: + +.. code-block:: javascript + + { + "_id": "rep_from_A", + "source": "http://aserver.com:5984/foo", + "target": "foo_a", + "continuous": true, + "_replication_id": "c0ebe9256695ff083347cbf95f93e280", + "_replication_state": "triggered", + "_replication_state_time": 1297971311 + } + +.. code-block:: javascript + + { + "_id": "rep_from_B", + "source": "http://bserver.com:5984/foo", + "target": "foo_b", + "continuous": true, + "_replication_id": "231bb3cf9d48314eaa8d48a9170570d1", + "_replication_state": "triggered", + "_replication_state_time": 1297974122 + } + +Now you would like to have the same pull replications going on in server +D, that is, you would like to have server D pull replicating from +servers A and B. You have two options: + +- Explicitly add two documents to server's D replicator database + +- Replicate server's C replicator database into server's D replicator + database + +Both alternatives accomplish exactly the same goal. + +Delegations +----------- + +Replication documents can have a custom ``user_ctx`` property. This +property defines the user context under which a replication runs. 
function also ensures that a non admin user cannot set a user name property
code-block:: javascript + + { + "_id": "my_rep", + "source": "http://bserver.com:5984/foo", + "target": "bar", + "continuous": true, + "user_ctx": { + "name": "joe", + "roles": ["erlanger", "researcher"] + } + } + +As stated before, for admins the ``user_ctx`` property is optional, while +for regular (non admin) users it's mandatory. When the roles property of +``user_ctx`` is missing, it defaults to the empty list ``[ ]``. Binary files /tmp/DjhtJwK2X2/couchdb-1.2.0/share/doc/build/html/_static/ajax-loader.gif and /tmp/T3VxxJYoFO/couchdb-1.4.0~rc.1/share/doc/build/html/_static/ajax-loader.gif differ diff -Nru couchdb-1.2.0/share/doc/build/html/_static/basic.css couchdb-1.4.0~rc.1/share/doc/build/html/_static/basic.css --- couchdb-1.2.0/share/doc/build/html/_static/basic.css 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/_static/basic.css 2013-08-23 10:59:02.000000000 -0400 @@ -0,0 +1,540 @@ +/* + * basic.css + * ~~~~~~~~~ + * + * Sphinx stylesheet -- basic theme. + * + * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. 
+ * + */ + +/* -- main layout ----------------------------------------------------------- */ + +div.clearer { + clear: both; +} + +/* -- relbar ---------------------------------------------------------------- */ + +div.related { + width: 100%; + font-size: 90%; +} + +div.related h3 { + display: none; +} + +div.related ul { + margin: 0; + padding: 0 0 0 10px; + list-style: none; +} + +div.related li { + display: inline; +} + +div.related li.right { + float: right; + margin-right: 5px; +} + +/* -- sidebar --------------------------------------------------------------- */ + +div.sphinxsidebarwrapper { + padding: 10px 5px 0 10px; +} + +div.sphinxsidebar { + float: left; + width: 230px; + margin-left: -100%; + font-size: 90%; +} + +div.sphinxsidebar ul { + list-style: none; +} + +div.sphinxsidebar ul ul, +div.sphinxsidebar ul.want-points { + margin-left: 20px; + list-style: square; +} + +div.sphinxsidebar ul ul { + margin-top: 0; + margin-bottom: 0; +} + +div.sphinxsidebar form { + margin-top: 10px; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + +div.sphinxsidebar #searchbox input[type="text"] { + width: 170px; +} + +div.sphinxsidebar #searchbox input[type="submit"] { + width: 30px; +} + +img { + border: 0; +} + +/* -- search page ----------------------------------------------------------- */ + +ul.search { + margin: 10px 0 0 20px; + padding: 0; +} + +ul.search li { + padding: 5px 0 5px 20px; + background-image: url(file.png); + background-repeat: no-repeat; + background-position: 0 7px; +} + +ul.search li a { + font-weight: bold; +} + +ul.search li div.context { + color: #888; + margin: 2px 0 0 30px; + text-align: left; +} + +ul.keywordmatches li.goodmatch a { + font-weight: bold; +} + +/* -- index page ------------------------------------------------------------ */ + +table.contentstable { + width: 90%; +} + +table.contentstable p.biglink { + line-height: 150%; +} + +a.biglink { + font-size: 1.3em; +} + 
+span.linkdescr { + font-style: italic; + padding-top: 5px; + font-size: 90%; +} + +/* -- general index --------------------------------------------------------- */ + +table.indextable { + width: 100%; +} + +table.indextable td { + text-align: left; + vertical-align: top; +} + +table.indextable dl, table.indextable dd { + margin-top: 0; + margin-bottom: 0; +} + +table.indextable tr.pcap { + height: 10px; +} + +table.indextable tr.cap { + margin-top: 10px; + background-color: #f2f2f2; +} + +img.toggler { + margin-right: 3px; + margin-top: 3px; + cursor: pointer; +} + +div.modindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +div.genindex-jumpbox { + border-top: 1px solid #ddd; + border-bottom: 1px solid #ddd; + margin: 1em 0 1em 0; + padding: 0.4em; +} + +/* -- general body styles --------------------------------------------------- */ + +a.headerlink { + visibility: hidden; +} + +h1:hover > a.headerlink, +h2:hover > a.headerlink, +h3:hover > a.headerlink, +h4:hover > a.headerlink, +h5:hover > a.headerlink, +h6:hover > a.headerlink, +dt:hover > a.headerlink { + visibility: visible; +} + +div.body p.caption { + text-align: inherit; +} + +div.body td { + text-align: left; +} + +.field-list ul { + padding-left: 1em; +} + +.first { + margin-top: 0 !important; +} + +p.rubric { + margin-top: 30px; + font-weight: bold; +} + +img.align-left, .figure.align-left, object.align-left { + clear: left; + float: left; + margin-right: 1em; +} + +img.align-right, .figure.align-right, object.align-right { + clear: right; + float: right; + margin-left: 1em; +} + +img.align-center, .figure.align-center, object.align-center { + display: block; + margin-left: auto; + margin-right: auto; +} + +.align-left { + text-align: left; +} + +.align-center { + text-align: center; +} + +.align-right { + text-align: right; +} + +/* -- sidebars -------------------------------------------------------------- */ + +div.sidebar { + 
margin: 0 0 0.5em 1em; + border: 1px solid #ddb; + padding: 7px 7px 0 7px; + background-color: #ffe; + width: 40%; + float: right; +} + +p.sidebar-title { + font-weight: bold; +} + +/* -- topics ---------------------------------------------------------------- */ + +div.topic { + border: 1px solid #ccc; + padding: 7px 7px 0 7px; + margin: 10px 0 10px 0; +} + +p.topic-title { + font-size: 1.1em; + font-weight: bold; + margin-top: 10px; +} + +/* -- admonitions ----------------------------------------------------------- */ + +div.admonition { + margin-top: 10px; + margin-bottom: 10px; + padding: 7px; +} + +div.admonition dt { + font-weight: bold; +} + +div.admonition dl { + margin-bottom: 0; +} + +p.admonition-title { + margin: 0px 10px 5px 0px; + font-weight: bold; +} + +div.body p.centered { + text-align: center; + margin-top: 25px; +} + +/* -- tables ---------------------------------------------------------------- */ + +table.docutils { + border: 0; + border-collapse: collapse; +} + +table.docutils td, table.docutils th { + padding: 1px 8px 1px 5px; + border-top: 0; + border-left: 0; + border-right: 0; + border-bottom: 1px solid #aaa; +} + +table.field-list td, table.field-list th { + border: 0 !important; +} + +table.footnote td, table.footnote th { + border: 0 !important; +} + +th { + text-align: left; + padding-right: 5px; +} + +table.citation { + border-left: solid 1px gray; + margin-left: 1px; +} + +table.citation td { + border-bottom: none; +} + +/* -- other body styles ----------------------------------------------------- */ + +ol.arabic { + list-style: decimal; +} + +ol.loweralpha { + list-style: lower-alpha; +} + +ol.upperalpha { + list-style: upper-alpha; +} + +ol.lowerroman { + list-style: lower-roman; +} + +ol.upperroman { + list-style: upper-roman; +} + +dl { + margin-bottom: 15px; +} + +dd p { + margin-top: 0px; +} + +dd ul, dd table { + margin-bottom: 10px; +} + +dd { + margin-top: 3px; + margin-bottom: 10px; + margin-left: 30px; +} + +dt:target, 
.highlighted { + background-color: #fbe54e; +} + +dl.glossary dt { + font-weight: bold; + font-size: 1.1em; +} + +.field-list ul { + margin: 0; + padding-left: 1em; +} + +.field-list p { + margin: 0; +} + +.refcount { + color: #060; +} + +.optional { + font-size: 1.3em; +} + +.versionmodified { + font-style: italic; +} + +.system-message { + background-color: #fda; + padding: 5px; + border: 3px solid red; +} + +.footnote:target { + background-color: #ffa; +} + +.line-block { + display: block; + margin-top: 1em; + margin-bottom: 1em; +} + +.line-block .line-block { + margin-top: 0; + margin-bottom: 0; + margin-left: 1.5em; +} + +.guilabel, .menuselection { + font-family: sans-serif; +} + +.accelerator { + text-decoration: underline; +} + +.classifier { + font-style: oblique; +} + +abbr, acronym { + border-bottom: dotted 1px; + cursor: help; +} + +/* -- code displays --------------------------------------------------------- */ + +pre { + overflow: auto; + overflow-y: hidden; /* fixes display issues on Chrome browsers */ +} + +td.linenos pre { + padding: 5px 0px; + border: 0; + background-color: transparent; + color: #aaa; +} + +table.highlighttable { + margin-left: 0.5em; +} + +table.highlighttable td { + padding: 0 0.5em 0 0.5em; +} + +tt.descname { + background-color: transparent; + font-weight: bold; + font-size: 1.2em; +} + +tt.descclassname { + background-color: transparent; +} + +tt.xref, a tt { + background-color: transparent; + font-weight: bold; +} + +h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt { + background-color: transparent; +} + +.viewcode-link { + float: right; +} + +.viewcode-back { + float: right; + font-family: sans-serif; +} + +div.viewcode-block:target { + margin: -1px -10px; + padding: 0 10px; +} + +/* -- math display ---------------------------------------------------------- */ + +img.math { + vertical-align: middle; +} + +div.body div.math p { + text-align: center; +} + +span.eqno { + float: right; +} + +/* -- printout stylesheet 
--------------------------------------------------- */ + +@media print { + div.document, + div.documentwrapper, + div.bodywrapper { + margin: 0 !important; + width: 100%; + } + + div.sphinxsidebar, + div.related, + div.footer, + #top-link { + display: none; + } +} \ No newline at end of file Binary files /tmp/DjhtJwK2X2/couchdb-1.2.0/share/doc/build/html/_static/comment-bright.png and /tmp/T3VxxJYoFO/couchdb-1.4.0~rc.1/share/doc/build/html/_static/comment-bright.png differ Binary files /tmp/DjhtJwK2X2/couchdb-1.2.0/share/doc/build/html/_static/comment-close.png and /tmp/T3VxxJYoFO/couchdb-1.4.0~rc.1/share/doc/build/html/_static/comment-close.png differ Binary files /tmp/DjhtJwK2X2/couchdb-1.2.0/share/doc/build/html/_static/comment.png and /tmp/T3VxxJYoFO/couchdb-1.4.0~rc.1/share/doc/build/html/_static/comment.png differ diff -Nru couchdb-1.2.0/share/doc/build/html/_static/default.css couchdb-1.4.0~rc.1/share/doc/build/html/_static/default.css --- couchdb-1.2.0/share/doc/build/html/_static/default.css 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/_static/default.css 2013-08-23 10:59:02.000000000 -0400 @@ -0,0 +1,256 @@ +/* + * default.css_t + * ~~~~~~~~~~~~~ + * + * Sphinx stylesheet -- default theme. + * + * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. 
+ * + */ + +@import url("basic.css"); + +/* -- page layout ----------------------------------------------------------- */ + +body { + font-family: sans-serif; + font-size: 100%; + background-color: #11303d; + color: #000; + margin: 0; + padding: 0; +} + +div.document { + background-color: #1c4e63; +} + +div.documentwrapper { + float: left; + width: 100%; +} + +div.bodywrapper { + margin: 0 0 0 230px; +} + +div.body { + background-color: #ffffff; + color: #000000; + padding: 0 20px 30px 20px; +} + +div.footer { + color: #ffffff; + width: 100%; + padding: 9px 0 9px 0; + text-align: center; + font-size: 75%; +} + +div.footer a { + color: #ffffff; + text-decoration: underline; +} + +div.related { + background-color: #133f52; + line-height: 30px; + color: #ffffff; +} + +div.related a { + color: #ffffff; +} + +div.sphinxsidebar { +} + +div.sphinxsidebar h3 { + font-family: 'Trebuchet MS', sans-serif; + color: #ffffff; + font-size: 1.4em; + font-weight: normal; + margin: 0; + padding: 0; +} + +div.sphinxsidebar h3 a { + color: #ffffff; +} + +div.sphinxsidebar h4 { + font-family: 'Trebuchet MS', sans-serif; + color: #ffffff; + font-size: 1.3em; + font-weight: normal; + margin: 5px 0 0 0; + padding: 0; +} + +div.sphinxsidebar p { + color: #ffffff; +} + +div.sphinxsidebar p.topless { + margin: 5px 10px 10px 10px; +} + +div.sphinxsidebar ul { + margin: 10px; + padding: 0; + color: #ffffff; +} + +div.sphinxsidebar a { + color: #98dbcc; +} + +div.sphinxsidebar input { + border: 1px solid #98dbcc; + font-family: sans-serif; + font-size: 1em; +} + + + +/* -- hyperlink styles ------------------------------------------------------ */ + +a { + color: #355f7c; + text-decoration: none; +} + +a:visited { + color: #355f7c; + text-decoration: none; +} + +a:hover { + text-decoration: underline; +} + + + +/* -- body styles ----------------------------------------------------------- */ + +div.body h1, +div.body h2, +div.body h3, +div.body h4, +div.body h5, +div.body h6 { + font-family: 
'Trebuchet MS', sans-serif; + background-color: #f2f2f2; + font-weight: normal; + color: #20435c; + border-bottom: 1px solid #ccc; + margin: 20px -20px 10px -20px; + padding: 3px 0 3px 10px; +} + +div.body h1 { margin-top: 0; font-size: 200%; } +div.body h2 { font-size: 160%; } +div.body h3 { font-size: 140%; } +div.body h4 { font-size: 120%; } +div.body h5 { font-size: 110%; } +div.body h6 { font-size: 100%; } + +a.headerlink { + color: #c60f0f; + font-size: 0.8em; + padding: 0 4px 0 4px; + text-decoration: none; +} + +a.headerlink:hover { + background-color: #c60f0f; + color: white; +} + +div.body p, div.body dd, div.body li { + text-align: justify; + line-height: 130%; +} + +div.admonition p.admonition-title + p { + display: inline; +} + +div.admonition p { + margin-bottom: 5px; +} + +div.admonition pre { + margin-bottom: 5px; +} + +div.admonition ul, div.admonition ol { + margin-bottom: 5px; +} + +div.note { + background-color: #eee; + border: 1px solid #ccc; +} + +div.seealso { + background-color: #ffc; + border: 1px solid #ff6; +} + +div.topic { + background-color: #eee; +} + +div.warning { + background-color: #ffe4e4; + border: 1px solid #f66; +} + +p.admonition-title { + display: inline; +} + +p.admonition-title:after { + content: ":"; +} + +pre { + padding: 5px; + background-color: #eeffcc; + color: #333333; + line-height: 120%; + border: 1px solid #ac9; + border-left: none; + border-right: none; +} + +tt { + background-color: #ecf0f3; + padding: 0 1px 0 1px; + font-size: 0.95em; +} + +th { + background-color: #ede; +} + +.warning tt { + background: #efc2c2; +} + +.note tt { + background: #d6d6d6; +} + +.viewcode-back { + font-family: sans-serif; +} + +div.viewcode-block:target { + background-color: #f4debf; + border-top: 1px solid #ac9; + border-bottom: 1px solid #ac9; +} \ No newline at end of file diff -Nru couchdb-1.2.0/share/doc/build/html/_static/doctools.js couchdb-1.4.0~rc.1/share/doc/build/html/_static/doctools.js --- 
couchdb-1.2.0/share/doc/build/html/_static/doctools.js 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/_static/doctools.js 2011-11-01 10:13:30.000000000 -0400 @@ -0,0 +1,247 @@ +/* + * doctools.js + * ~~~~~~~~~~~ + * + * Sphinx JavaScript utilities for all documentation. + * + * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +/** + * select a different prefix for underscore + */ +$u = _.noConflict(); + +/** + * make the code below compatible with browsers without + * an installed firebug like debugger +if (!window.console || !console.firebug) { + var names = ["log", "debug", "info", "warn", "error", "assert", "dir", + "dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace", + "profile", "profileEnd"]; + window.console = {}; + for (var i = 0; i < names.length; ++i) + window.console[names[i]] = function() {}; +} + */ + +/** + * small helper function to urldecode strings + */ +jQuery.urldecode = function(x) { + return decodeURIComponent(x).replace(/\+/g, ' '); +} + +/** + * small helper function to urlencode strings + */ +jQuery.urlencode = encodeURIComponent; + +/** + * This function returns the parsed url parameters of the + * current request. Multiple values per key are supported, + * it will always return arrays of strings for the value parts. + */ +jQuery.getQueryParameters = function(s) { + if (typeof s == 'undefined') + s = document.location.search; + var parts = s.substr(s.indexOf('?') + 1).split('&'); + var result = {}; + for (var i = 0; i < parts.length; i++) { + var tmp = parts[i].split('=', 2); + var key = jQuery.urldecode(tmp[0]); + var value = jQuery.urldecode(tmp[1]); + if (key in result) + result[key].push(value); + else + result[key] = [value]; + } + return result; +}; + +/** + * small function to check if an array contains + * a given item. 
+ */ +jQuery.contains = function(arr, item) { + for (var i = 0; i < arr.length; i++) { + if (arr[i] == item) + return true; + } + return false; +}; + +/** + * highlight a given string on a jquery object by wrapping it in + * span elements with the given class name. + */ +jQuery.fn.highlightText = function(text, className) { + function highlight(node) { + if (node.nodeType == 3) { + var val = node.nodeValue; + var pos = val.toLowerCase().indexOf(text); + if (pos >= 0 && !jQuery(node.parentNode).hasClass(className)) { + var span = document.createElement("span"); + span.className = className; + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + node.parentNode.insertBefore(span, node.parentNode.insertBefore( + document.createTextNode(val.substr(pos + text.length)), + node.nextSibling)); + node.nodeValue = val.substr(0, pos); + } + } + else if (!jQuery(node).is("button, select, textarea")) { + jQuery.each(node.childNodes, function() { + highlight(this); + }); + } + } + return this.each(function() { + highlight(this); + }); +}; + +/** + * Small JavaScript module for the documentation. + */ +var Documentation = { + + init : function() { + this.fixFirefoxAnchorBug(); + this.highlightSearchWords(); + this.initIndexTable(); + }, + + /** + * i18n support + */ + TRANSLATIONS : {}, + PLURAL_EXPR : function(n) { return n == 1 ? 0 : 1; }, + LOCALE : 'unknown', + + // gettext and ngettext don't access this so that the functions + // can safely bound to a different name (_ = Documentation.gettext) + gettext : function(string) { + var translated = Documentation.TRANSLATIONS[string]; + if (typeof translated == 'undefined') + return string; + return (typeof translated == 'string') ? translated : translated[0]; + }, + + ngettext : function(singular, plural, n) { + var translated = Documentation.TRANSLATIONS[singular]; + if (typeof translated == 'undefined') + return (n == 1) ? 
singular : plural; + return translated[Documentation.PLURALEXPR(n)]; + }, + + addTranslations : function(catalog) { + for (var key in catalog.messages) + this.TRANSLATIONS[key] = catalog.messages[key]; + this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')'); + this.LOCALE = catalog.locale; + }, + + /** + * add context elements like header anchor links + */ + addContextElements : function() { + $('div[id] > :header:first').each(function() { + $('\u00B6'). + attr('href', '#' + this.id). + attr('title', _('Permalink to this headline')). + appendTo(this); + }); + $('dt[id]').each(function() { + $('\u00B6'). + attr('href', '#' + this.id). + attr('title', _('Permalink to this definition')). + appendTo(this); + }); + }, + + /** + * workaround a firefox stupidity + */ + fixFirefoxAnchorBug : function() { + if (document.location.hash && $.browser.mozilla) + window.setTimeout(function() { + document.location.href += ''; + }, 10); + }, + + /** + * highlight the search words provided in the url in the text + */ + highlightSearchWords : function() { + var params = $.getQueryParameters(); + var terms = (params.highlight) ? 
params.highlight[0].split(/\s+/) : []; + if (terms.length) { + var body = $('div.body'); + window.setTimeout(function() { + $.each(terms, function() { + body.highlightText(this.toLowerCase(), 'highlighted'); + }); + }, 10); + $('') + .appendTo($('#searchbox')); + } + }, + + /** + * init the domain index toggle buttons + */ + initIndexTable : function() { + var togglers = $('img.toggler').click(function() { + var src = $(this).attr('src'); + var idnum = $(this).attr('id').substr(7); + $('tr.cg-' + idnum).toggle(); + if (src.substr(-9) == 'minus.png') + $(this).attr('src', src.substr(0, src.length-9) + 'plus.png'); + else + $(this).attr('src', src.substr(0, src.length-8) + 'minus.png'); + }).css('display', ''); + if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) { + togglers.click(); + } + }, + + /** + * helper function to hide the search marks again + */ + hideSearchWords : function() { + $('#searchbox .highlight-link').fadeOut(300); + $('span.highlighted').removeClass('highlighted'); + }, + + /** + * make the url absolute + */ + makeURL : function(relativeURL) { + return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL; + }, + + /** + * get the current relative url + */ + getCurrentURL : function() { + var path = document.location.pathname; + var parts = path.split(/\//); + $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() { + if (this == '..') + parts.pop(); + }); + var url = parts.join('/'); + return path.substring(url.lastIndexOf('/') + 1, path.length - 1); + } +}; + +// quick alias for translations +_ = Documentation.gettext; + +$(document).ready(function() { + Documentation.init(); +}); Binary files /tmp/DjhtJwK2X2/couchdb-1.2.0/share/doc/build/html/_static/down.png and /tmp/T3VxxJYoFO/couchdb-1.4.0~rc.1/share/doc/build/html/_static/down.png differ Binary files /tmp/DjhtJwK2X2/couchdb-1.2.0/share/doc/build/html/_static/down-pressed.png and /tmp/T3VxxJYoFO/couchdb-1.4.0~rc.1/share/doc/build/html/_static/down-pressed.png differ Binary files 
/tmp/DjhtJwK2X2/couchdb-1.2.0/share/doc/build/html/_static/favicon.ico and /tmp/T3VxxJYoFO/couchdb-1.4.0~rc.1/share/doc/build/html/_static/favicon.ico differ Binary files /tmp/DjhtJwK2X2/couchdb-1.2.0/share/doc/build/html/_static/file.png and /tmp/T3VxxJYoFO/couchdb-1.4.0~rc.1/share/doc/build/html/_static/file.png differ diff -Nru couchdb-1.2.0/share/doc/build/html/_static/jquery.js couchdb-1.4.0~rc.1/share/doc/build/html/_static/jquery.js --- couchdb-1.2.0/share/doc/build/html/_static/jquery.js 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/_static/jquery.js 2012-03-10 15:38:40.000000000 -0500 @@ -0,0 +1,154 @@ +/*! + * jQuery JavaScript Library v1.4.2 + * http://jquery.com/ + * + * Copyright 2010, John Resig + * Dual licensed under the MIT or GPL Version 2 licenses. + * http://jquery.org/license + * + * Includes Sizzle.js + * http://sizzlejs.com/ + * Copyright 2010, The Dojo Foundation + * Released under the MIT, BSD, and GPL Licenses. + * + * Date: Sat Feb 13 22:33:48 2010 -0500 + */ +(function(A,w){function ma(){if(!c.isReady){try{s.documentElement.doScroll("left")}catch(a){setTimeout(ma,1);return}c.ready()}}function Qa(a,b){b.src?c.ajax({url:b.src,async:false,dataType:"script"}):c.globalEval(b.text||b.textContent||b.innerHTML||"");b.parentNode&&b.parentNode.removeChild(b)}function X(a,b,d,f,e,j){var i=a.length;if(typeof b==="object"){for(var o in b)X(a,o,b[o],f,e,d);return a}if(d!==w){f=!j&&f&&c.isFunction(d);for(o=0;o)[^>]*$|^#([\w-]+)$/,Ua=/^.[^:#\[\.,]*$/,Va=/\S/, +Wa=/^(\s|\u00A0)+|(\s|\u00A0)+$/g,Xa=/^<(\w+)\s*\/?>(?:<\/\1>)?$/,P=navigator.userAgent,xa=false,Q=[],L,$=Object.prototype.toString,aa=Object.prototype.hasOwnProperty,ba=Array.prototype.push,R=Array.prototype.slice,ya=Array.prototype.indexOf;c.fn=c.prototype={init:function(a,b){var d,f;if(!a)return this;if(a.nodeType){this.context=this[0]=a;this.length=1;return this}if(a==="body"&&!b){this.context=s;this[0]=s.body;this.selector="body";this.length=1;return 
this}if(typeof a==="string")if((d=Ta.exec(a))&& +(d[1]||!b))if(d[1]){f=b?b.ownerDocument||b:s;if(a=Xa.exec(a))if(c.isPlainObject(b)){a=[s.createElement(a[1])];c.fn.attr.call(a,b,true)}else a=[f.createElement(a[1])];else{a=sa([d[1]],[f]);a=(a.cacheable?a.fragment.cloneNode(true):a.fragment).childNodes}return c.merge(this,a)}else{if(b=s.getElementById(d[2])){if(b.id!==d[2])return T.find(a);this.length=1;this[0]=b}this.context=s;this.selector=a;return this}else if(!b&&/^\w+$/.test(a)){this.selector=a;this.context=s;a=s.getElementsByTagName(a);return c.merge(this, +a)}else return!b||b.jquery?(b||T).find(a):c(b).find(a);else if(c.isFunction(a))return T.ready(a);if(a.selector!==w){this.selector=a.selector;this.context=a.context}return c.makeArray(a,this)},selector:"",jquery:"1.4.2",length:0,size:function(){return this.length},toArray:function(){return R.call(this,0)},get:function(a){return a==null?this.toArray():a<0?this.slice(a)[0]:this[a]},pushStack:function(a,b,d){var f=c();c.isArray(a)?ba.apply(f,a):c.merge(f,a);f.prevObject=this;f.context=this.context;if(b=== +"find")f.selector=this.selector+(this.selector?" ":"")+d;else if(b)f.selector=this.selector+"."+b+"("+d+")";return f},each:function(a,b){return c.each(this,a,b)},ready:function(a){c.bindReady();if(c.isReady)a.call(s,c);else Q&&Q.push(a);return this},eq:function(a){return a===-1?this.slice(a):this.slice(a,+a+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(R.apply(this,arguments),"slice",R.call(arguments).join(","))},map:function(a){return this.pushStack(c.map(this, +function(b,d){return a.call(b,d,b)}))},end:function(){return this.prevObject||c(null)},push:ba,sort:[].sort,splice:[].splice};c.fn.init.prototype=c.fn;c.extend=c.fn.extend=function(){var a=arguments[0]||{},b=1,d=arguments.length,f=false,e,j,i,o;if(typeof a==="boolean"){f=a;a=arguments[1]||{};b=2}if(typeof a!=="object"&&!c.isFunction(a))a={};if(d===b){a=this;--b}for(;b
a"; +var e=d.getElementsByTagName("*"),j=d.getElementsByTagName("a")[0];if(!(!e||!e.length||!j)){c.support={leadingWhitespace:d.firstChild.nodeType===3,tbody:!d.getElementsByTagName("tbody").length,htmlSerialize:!!d.getElementsByTagName("link").length,style:/red/.test(j.getAttribute("style")),hrefNormalized:j.getAttribute("href")==="/a",opacity:/^0.55$/.test(j.style.opacity),cssFloat:!!j.style.cssFloat,checkOn:d.getElementsByTagName("input")[0].value==="on",optSelected:s.createElement("select").appendChild(s.createElement("option")).selected, +parentNode:d.removeChild(d.appendChild(s.createElement("div"))).parentNode===null,deleteExpando:true,checkClone:false,scriptEval:false,noCloneEvent:true,boxModel:null};b.type="text/javascript";try{b.appendChild(s.createTextNode("window."+f+"=1;"))}catch(i){}a.insertBefore(b,a.firstChild);if(A[f]){c.support.scriptEval=true;delete A[f]}try{delete b.test}catch(o){c.support.deleteExpando=false}a.removeChild(b);if(d.attachEvent&&d.fireEvent){d.attachEvent("onclick",function k(){c.support.noCloneEvent= +false;d.detachEvent("onclick",k)});d.cloneNode(true).fireEvent("onclick")}d=s.createElement("div");d.innerHTML="";a=s.createDocumentFragment();a.appendChild(d.firstChild);c.support.checkClone=a.cloneNode(true).cloneNode(true).lastChild.checked;c(function(){var k=s.createElement("div");k.style.width=k.style.paddingLeft="1px";s.body.appendChild(k);c.boxModel=c.support.boxModel=k.offsetWidth===2;s.body.removeChild(k).style.display="none"});a=function(k){var n= +s.createElement("div");k="on"+k;var r=k in n;if(!r){n.setAttribute(k,"return;");r=typeof n[k]==="function"}return r};c.support.submitBubbles=a("submit");c.support.changeBubbles=a("change");a=b=d=e=j=null}})();c.props={"for":"htmlFor","class":"className",readonly:"readOnly",maxlength:"maxLength",cellspacing:"cellSpacing",rowspan:"rowSpan",colspan:"colSpan",tabindex:"tabIndex",usemap:"useMap",frameborder:"frameBorder"};var 
G="jQuery"+J(),Ya=0,za={};c.extend({cache:{},expando:G,noData:{embed:true,object:true, +applet:true},data:function(a,b,d){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var f=a[G],e=c.cache;if(!f&&typeof b==="string"&&d===w)return null;f||(f=++Ya);if(typeof b==="object"){a[G]=f;e[f]=c.extend(true,{},b)}else if(!e[f]){a[G]=f;e[f]={}}a=e[f];if(d!==w)a[b]=d;return typeof b==="string"?a[b]:a}},removeData:function(a,b){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var d=a[G],f=c.cache,e=f[d];if(b){if(e){delete e[b];c.isEmptyObject(e)&&c.removeData(a)}}else{if(c.support.deleteExpando)delete a[c.expando]; +else a.removeAttribute&&a.removeAttribute(c.expando);delete f[d]}}}});c.fn.extend({data:function(a,b){if(typeof a==="undefined"&&this.length)return c.data(this[0]);else if(typeof a==="object")return this.each(function(){c.data(this,a)});var d=a.split(".");d[1]=d[1]?"."+d[1]:"";if(b===w){var f=this.triggerHandler("getData"+d[1]+"!",[d[0]]);if(f===w&&this.length)f=c.data(this[0],a);return f===w&&d[1]?this.data(d[0]):f}else return this.trigger("setData"+d[1]+"!",[d[0],b]).each(function(){c.data(this, +a,b)})},removeData:function(a){return this.each(function(){c.removeData(this,a)})}});c.extend({queue:function(a,b,d){if(a){b=(b||"fx")+"queue";var f=c.data(a,b);if(!d)return f||[];if(!f||c.isArray(d))f=c.data(a,b,c.makeArray(d));else f.push(d);return f}},dequeue:function(a,b){b=b||"fx";var d=c.queue(a,b),f=d.shift();if(f==="inprogress")f=d.shift();if(f){b==="fx"&&d.unshift("inprogress");f.call(a,function(){c.dequeue(a,b)})}}});c.fn.extend({queue:function(a,b){if(typeof a!=="string"){b=a;a="fx"}if(b=== +w)return c.queue(this[0],a);return this.each(function(){var d=c.queue(this,a,b);a==="fx"&&d[0]!=="inprogress"&&c.dequeue(this,a)})},dequeue:function(a){return this.each(function(){c.dequeue(this,a)})},delay:function(a,b){a=c.fx?c.fx.speeds[a]||a:a;b=b||"fx";return this.queue(b,function(){var 
d=this;setTimeout(function(){c.dequeue(d,b)},a)})},clearQueue:function(a){return this.queue(a||"fx",[])}});var Aa=/[\n\t]/g,ca=/\s+/,Za=/\r/g,$a=/href|src|style/,ab=/(button|input)/i,bb=/(button|input|object|select|textarea)/i, +cb=/^(a|area)$/i,Ba=/radio|checkbox/;c.fn.extend({attr:function(a,b){return X(this,a,b,true,c.attr)},removeAttr:function(a){return this.each(function(){c.attr(this,a,"");this.nodeType===1&&this.removeAttribute(a)})},addClass:function(a){if(c.isFunction(a))return this.each(function(n){var r=c(this);r.addClass(a.call(this,n,r.attr("class")))});if(a&&typeof a==="string")for(var b=(a||"").split(ca),d=0,f=this.length;d-1)return true;return false},val:function(a){if(a===w){var b=this[0];if(b){if(c.nodeName(b,"option"))return(b.attributes.value||{}).specified?b.value:b.text;if(c.nodeName(b,"select")){var d=b.selectedIndex,f=[],e=b.options;b=b.type==="select-one";if(d<0)return null;var j=b?d:0;for(d=b?d+1:e.length;j=0;else if(c.nodeName(this,"select")){var u=c.makeArray(r);c("option",this).each(function(){this.selected= +c.inArray(c(this).val(),u)>=0});if(!u.length)this.selectedIndex=-1}else this.value=r}})}});c.extend({attrFn:{val:true,css:true,html:true,text:true,data:true,width:true,height:true,offset:true},attr:function(a,b,d,f){if(!a||a.nodeType===3||a.nodeType===8)return w;if(f&&b in c.attrFn)return c(a)[b](d);f=a.nodeType!==1||!c.isXMLDoc(a);var e=d!==w;b=f&&c.props[b]||b;if(a.nodeType===1){var j=$a.test(b);if(b in a&&f&&!j){if(e){b==="type"&&ab.test(a.nodeName)&&a.parentNode&&c.error("type property can't be changed"); +a[b]=d}if(c.nodeName(a,"form")&&a.getAttributeNode(b))return a.getAttributeNode(b).nodeValue;if(b==="tabIndex")return(b=a.getAttributeNode("tabIndex"))&&b.specified?b.value:bb.test(a.nodeName)||cb.test(a.nodeName)&&a.href?0:w;return a[b]}if(!c.support.style&&f&&b==="style"){if(e)a.style.cssText=""+d;return a.style.cssText}e&&a.setAttribute(b,""+d);a=!c.support.hrefNormalized&&f&&j?a.getAttribute(b,2):a.getAttribute(b);return 
a===null?w:a}return c.style(a,b,d)}});var O=/\.(.*)$/,db=function(a){return a.replace(/[^\w\s\.\|`]/g, +function(b){return"\\"+b})};c.event={add:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){if(a.setInterval&&a!==A&&!a.frameElement)a=A;var e,j;if(d.handler){e=d;d=e.handler}if(!d.guid)d.guid=c.guid++;if(j=c.data(a)){var i=j.events=j.events||{},o=j.handle;if(!o)j.handle=o=function(){return typeof c!=="undefined"&&!c.event.triggered?c.event.handle.apply(o.elem,arguments):w};o.elem=a;b=b.split(" ");for(var k,n=0,r;k=b[n++];){j=e?c.extend({},e):{handler:d,data:f};if(k.indexOf(".")>-1){r=k.split("."); +k=r.shift();j.namespace=r.slice(0).sort().join(".")}else{r=[];j.namespace=""}j.type=k;j.guid=d.guid;var u=i[k],z=c.event.special[k]||{};if(!u){u=i[k]=[];if(!z.setup||z.setup.call(a,f,r,o)===false)if(a.addEventListener)a.addEventListener(k,o,false);else a.attachEvent&&a.attachEvent("on"+k,o)}if(z.add){z.add.call(a,j);if(!j.handler.guid)j.handler.guid=d.guid}u.push(j);c.event.global[k]=true}a=null}}},global:{},remove:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){var e,j=0,i,o,k,n,r,u,z=c.data(a), +C=z&&z.events;if(z&&C){if(b&&b.type){d=b.handler;b=b.type}if(!b||typeof b==="string"&&b.charAt(0)==="."){b=b||"";for(e in C)c.event.remove(a,e+b)}else{for(b=b.split(" ");e=b[j++];){n=e;i=e.indexOf(".")<0;o=[];if(!i){o=e.split(".");e=o.shift();k=new RegExp("(^|\\.)"+c.map(o.slice(0).sort(),db).join("\\.(?:.*\\.)?")+"(\\.|$)")}if(r=C[e])if(d){n=c.event.special[e]||{};for(B=f||0;B=0){a.type= +e=e.slice(0,-1);a.exclusive=true}if(!d){a.stopPropagation();c.event.global[e]&&c.each(c.cache,function(){this.events&&this.events[e]&&c.event.trigger(a,b,this.handle.elem)})}if(!d||d.nodeType===3||d.nodeType===8)return 
w;a.result=w;a.target=d;b=c.makeArray(b);b.unshift(a)}a.currentTarget=d;(f=c.data(d,"handle"))&&f.apply(d,b);f=d.parentNode||d.ownerDocument;try{if(!(d&&d.nodeName&&c.noData[d.nodeName.toLowerCase()]))if(d["on"+e]&&d["on"+e].apply(d,b)===false)a.result=false}catch(j){}if(!a.isPropagationStopped()&& +f)c.event.trigger(a,b,f,true);else if(!a.isDefaultPrevented()){f=a.target;var i,o=c.nodeName(f,"a")&&e==="click",k=c.event.special[e]||{};if((!k._default||k._default.call(d,a)===false)&&!o&&!(f&&f.nodeName&&c.noData[f.nodeName.toLowerCase()])){try{if(f[e]){if(i=f["on"+e])f["on"+e]=null;c.event.triggered=true;f[e]()}}catch(n){}if(i)f["on"+e]=i;c.event.triggered=false}}},handle:function(a){var b,d,f,e;a=arguments[0]=c.event.fix(a||A.event);a.currentTarget=this;b=a.type.indexOf(".")<0&&!a.exclusive; +if(!b){d=a.type.split(".");a.type=d.shift();f=new RegExp("(^|\\.)"+d.slice(0).sort().join("\\.(?:.*\\.)?")+"(\\.|$)")}e=c.data(this,"events");d=e[a.type];if(e&&d){d=d.slice(0);e=0;for(var j=d.length;e-1?c.map(a.options,function(f){return f.selected}).join("-"):"";else if(a.nodeName.toLowerCase()==="select")d=a.selectedIndex;return d},fa=function(a,b){var d=a.target,f,e;if(!(!da.test(d.nodeName)||d.readOnly)){f=c.data(d,"_change_data");e=Fa(d);if(a.type!=="focusout"||d.type!=="radio")c.data(d,"_change_data", +e);if(!(f===w||e===f))if(f!=null||e){a.type="change";return c.event.trigger(a,b,d)}}};c.event.special.change={filters:{focusout:fa,click:function(a){var b=a.target,d=b.type;if(d==="radio"||d==="checkbox"||b.nodeName.toLowerCase()==="select")return fa.call(this,a)},keydown:function(a){var b=a.target,d=b.type;if(a.keyCode===13&&b.nodeName.toLowerCase()!=="textarea"||a.keyCode===32&&(d==="checkbox"||d==="radio")||d==="select-multiple")return fa.call(this,a)},beforeactivate:function(a){a=a.target;c.data(a, +"_change_data",Fa(a))}},setup:function(){if(this.type==="file")return false;for(var a in ea)c.event.add(this,a+".specialChange",ea[a]);return 
da.test(this.nodeName)},teardown:function(){c.event.remove(this,".specialChange");return da.test(this.nodeName)}};ea=c.event.special.change.filters}s.addEventListener&&c.each({focus:"focusin",blur:"focusout"},function(a,b){function d(f){f=c.event.fix(f);f.type=b;return c.event.handle.call(this,f)}c.event.special[b]={setup:function(){this.addEventListener(a, +d,true)},teardown:function(){this.removeEventListener(a,d,true)}}});c.each(["bind","one"],function(a,b){c.fn[b]=function(d,f,e){if(typeof d==="object"){for(var j in d)this[b](j,f,d[j],e);return this}if(c.isFunction(f)){e=f;f=w}var i=b==="one"?c.proxy(e,function(k){c(this).unbind(k,i);return e.apply(this,arguments)}):e;if(d==="unload"&&b!=="one")this.one(d,f,e);else{j=0;for(var o=this.length;j0){y=t;break}}t=t[g]}m[q]=y}}}var f=/((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^[\]]*\]|['"][^'"]*['"]|[^[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g, +e=0,j=Object.prototype.toString,i=false,o=true;[0,0].sort(function(){o=false;return 0});var k=function(g,h,l,m){l=l||[];var q=h=h||s;if(h.nodeType!==1&&h.nodeType!==9)return[];if(!g||typeof g!=="string")return l;for(var p=[],v,t,y,S,H=true,M=x(h),I=g;(f.exec(""),v=f.exec(I))!==null;){I=v[3];p.push(v[1]);if(v[2]){S=v[3];break}}if(p.length>1&&r.exec(g))if(p.length===2&&n.relative[p[0]])t=ga(p[0]+p[1],h);else for(t=n.relative[p[0]]?[h]:k(p.shift(),h);p.length;){g=p.shift();if(n.relative[g])g+=p.shift(); +t=ga(g,t)}else{if(!m&&p.length>1&&h.nodeType===9&&!M&&n.match.ID.test(p[0])&&!n.match.ID.test(p[p.length-1])){v=k.find(p.shift(),h,M);h=v.expr?k.filter(v.expr,v.set)[0]:v.set[0]}if(h){v=m?{expr:p.pop(),set:z(m)}:k.find(p.pop(),p.length===1&&(p[0]==="~"||p[0]==="+")&&h.parentNode?h.parentNode:h,M);t=v.expr?k.filter(v.expr,v.set):v.set;if(p.length>0)y=z(t);else H=false;for(;p.length;){var D=p.pop();v=D;if(n.relative[D])v=p.pop();else D="";if(v==null)v=h;n.relative[D](y,v,M)}}else y=[]}y||(y=t);y||k.error(D|| +g);if(j.call(y)==="[object 
Array]")if(H)if(h&&h.nodeType===1)for(g=0;y[g]!=null;g++){if(y[g]&&(y[g]===true||y[g].nodeType===1&&E(h,y[g])))l.push(t[g])}else for(g=0;y[g]!=null;g++)y[g]&&y[g].nodeType===1&&l.push(t[g]);else l.push.apply(l,y);else z(y,l);if(S){k(S,q,l,m);k.uniqueSort(l)}return l};k.uniqueSort=function(g){if(B){i=o;g.sort(B);if(i)for(var h=1;h":function(g,h){var l=typeof h==="string";if(l&&!/\W/.test(h)){h=h.toLowerCase();for(var m=0,q=g.length;m=0))l||m.push(v);else if(l)h[p]=false;return false},ID:function(g){return g[1].replace(/\\/g,"")},TAG:function(g){return g[1].toLowerCase()}, +CHILD:function(g){if(g[1]==="nth"){var h=/(-?)(\d*)n((?:\+|-)?\d*)/.exec(g[2]==="even"&&"2n"||g[2]==="odd"&&"2n+1"||!/\D/.test(g[2])&&"0n+"+g[2]||g[2]);g[2]=h[1]+(h[2]||1)-0;g[3]=h[3]-0}g[0]=e++;return g},ATTR:function(g,h,l,m,q,p){h=g[1].replace(/\\/g,"");if(!p&&n.attrMap[h])g[1]=n.attrMap[h];if(g[2]==="~=")g[4]=" "+g[4]+" ";return g},PSEUDO:function(g,h,l,m,q){if(g[1]==="not")if((f.exec(g[3])||"").length>1||/^\w/.test(g[3]))g[3]=k(g[3],null,null,h);else{g=k.filter(g[3],h,l,true^q);l||m.push.apply(m, +g);return false}else if(n.match.POS.test(g[0])||n.match.CHILD.test(g[0]))return true;return g},POS:function(g){g.unshift(true);return g}},filters:{enabled:function(g){return g.disabled===false&&g.type!=="hidden"},disabled:function(g){return g.disabled===true},checked:function(g){return g.checked===true},selected:function(g){return g.selected===true},parent:function(g){return!!g.firstChild},empty:function(g){return!g.firstChild},has:function(g,h,l){return!!k(l[3],g).length},header:function(g){return/h\d/i.test(g.nodeName)}, 
+text:function(g){return"text"===g.type},radio:function(g){return"radio"===g.type},checkbox:function(g){return"checkbox"===g.type},file:function(g){return"file"===g.type},password:function(g){return"password"===g.type},submit:function(g){return"submit"===g.type},image:function(g){return"image"===g.type},reset:function(g){return"reset"===g.type},button:function(g){return"button"===g.type||g.nodeName.toLowerCase()==="button"},input:function(g){return/input|select|textarea|button/i.test(g.nodeName)}}, +setFilters:{first:function(g,h){return h===0},last:function(g,h,l,m){return h===m.length-1},even:function(g,h){return h%2===0},odd:function(g,h){return h%2===1},lt:function(g,h,l){return hl[3]-0},nth:function(g,h,l){return l[3]-0===h},eq:function(g,h,l){return l[3]-0===h}},filter:{PSEUDO:function(g,h,l,m){var q=h[1],p=n.filters[q];if(p)return p(g,l,h,m);else if(q==="contains")return(g.textContent||g.innerText||a([g])||"").indexOf(h[3])>=0;else if(q==="not"){h= +h[3];l=0;for(m=h.length;l=0}},ID:function(g,h){return g.nodeType===1&&g.getAttribute("id")===h},TAG:function(g,h){return h==="*"&&g.nodeType===1||g.nodeName.toLowerCase()===h},CLASS:function(g,h){return(" "+(g.className||g.getAttribute("class"))+" ").indexOf(h)>-1},ATTR:function(g,h){var l=h[1];g=n.attrHandle[l]?n.attrHandle[l](g):g[l]!=null?g[l]:g.getAttribute(l);l=g+"";var m=h[2];h=h[4];return g==null?m==="!=":m=== +"="?l===h:m==="*="?l.indexOf(h)>=0:m==="~="?(" "+l+" ").indexOf(h)>=0:!h?l&&g!==false:m==="!="?l!==h:m==="^="?l.indexOf(h)===0:m==="$="?l.substr(l.length-h.length)===h:m==="|="?l===h||l.substr(0,h.length+1)===h+"-":false},POS:function(g,h,l,m){var q=n.setFilters[h[2]];if(q)return q(g,l,h,m)}}},r=n.match.POS;for(var u in n.match){n.match[u]=new RegExp(n.match[u].source+/(?![^\[]*\])(?![^\(]*\))/.source);n.leftMatch[u]=new RegExp(/(^(?:.|\r|\n)*?)/.source+n.match[u].source.replace(/\\(\d+)/g,function(g, +h){return"\\"+(h-0+1)}))}var 
z=function(g,h){g=Array.prototype.slice.call(g,0);if(h){h.push.apply(h,g);return h}return g};try{Array.prototype.slice.call(s.documentElement.childNodes,0)}catch(C){z=function(g,h){h=h||[];if(j.call(g)==="[object Array]")Array.prototype.push.apply(h,g);else if(typeof g.length==="number")for(var l=0,m=g.length;l";var l=s.documentElement;l.insertBefore(g,l.firstChild);if(s.getElementById(h)){n.find.ID=function(m,q,p){if(typeof q.getElementById!=="undefined"&&!p)return(q=q.getElementById(m[1]))?q.id===m[1]||typeof q.getAttributeNode!=="undefined"&& +q.getAttributeNode("id").nodeValue===m[1]?[q]:w:[]};n.filter.ID=function(m,q){var p=typeof m.getAttributeNode!=="undefined"&&m.getAttributeNode("id");return m.nodeType===1&&p&&p.nodeValue===q}}l.removeChild(g);l=g=null})();(function(){var g=s.createElement("div");g.appendChild(s.createComment(""));if(g.getElementsByTagName("*").length>0)n.find.TAG=function(h,l){l=l.getElementsByTagName(h[1]);if(h[1]==="*"){h=[];for(var m=0;l[m];m++)l[m].nodeType===1&&h.push(l[m]);l=h}return l};g.innerHTML=""; +if(g.firstChild&&typeof g.firstChild.getAttribute!=="undefined"&&g.firstChild.getAttribute("href")!=="#")n.attrHandle.href=function(h){return h.getAttribute("href",2)};g=null})();s.querySelectorAll&&function(){var g=k,h=s.createElement("div");h.innerHTML="

";if(!(h.querySelectorAll&&h.querySelectorAll(".TEST").length===0)){k=function(m,q,p,v){q=q||s;if(!v&&q.nodeType===9&&!x(q))try{return z(q.querySelectorAll(m),p)}catch(t){}return g(m,q,p,v)};for(var l in g)k[l]=g[l];h=null}}(); +(function(){var g=s.createElement("div");g.innerHTML="
";if(!(!g.getElementsByClassName||g.getElementsByClassName("e").length===0)){g.lastChild.className="e";if(g.getElementsByClassName("e").length!==1){n.order.splice(1,0,"CLASS");n.find.CLASS=function(h,l,m){if(typeof l.getElementsByClassName!=="undefined"&&!m)return l.getElementsByClassName(h[1])};g=null}}})();var E=s.compareDocumentPosition?function(g,h){return!!(g.compareDocumentPosition(h)&16)}: +function(g,h){return g!==h&&(g.contains?g.contains(h):true)},x=function(g){return(g=(g?g.ownerDocument||g:0).documentElement)?g.nodeName!=="HTML":false},ga=function(g,h){var l=[],m="",q;for(h=h.nodeType?[h]:h;q=n.match.PSEUDO.exec(g);){m+=q[0];g=g.replace(n.match.PSEUDO,"")}g=n.relative[g]?g+"*":g;q=0;for(var p=h.length;q=0===d})};c.fn.extend({find:function(a){for(var b=this.pushStack("","find",a),d=0,f=0,e=this.length;f0)for(var j=d;j0},closest:function(a,b){if(c.isArray(a)){var d=[],f=this[0],e,j= +{},i;if(f&&a.length){e=0;for(var o=a.length;e-1:c(f).is(e)){d.push({selector:i,elem:f});delete j[i]}}f=f.parentNode}}return d}var k=c.expr.match.POS.test(a)?c(a,b||this.context):null;return this.map(function(n,r){for(;r&&r.ownerDocument&&r!==b;){if(k?k.index(r)>-1:c(r).is(a))return r;r=r.parentNode}return null})},index:function(a){if(!a||typeof a=== +"string")return c.inArray(this[0],a?c(a):this.parent().children());return c.inArray(a.jquery?a[0]:a,this)},add:function(a,b){a=typeof a==="string"?c(a,b||this.context):c.makeArray(a);b=c.merge(this.get(),a);return this.pushStack(qa(a[0])||qa(b[0])?b:c.unique(b))},andSelf:function(){return this.add(this.prevObject)}});c.each({parent:function(a){return(a=a.parentNode)&&a.nodeType!==11?a:null},parents:function(a){return c.dir(a,"parentNode")},parentsUntil:function(a,b,d){return c.dir(a,"parentNode", +d)},next:function(a){return c.nth(a,2,"nextSibling")},prev:function(a){return c.nth(a,2,"previousSibling")},nextAll:function(a){return c.dir(a,"nextSibling")},prevAll:function(a){return 
c.dir(a,"previousSibling")},nextUntil:function(a,b,d){return c.dir(a,"nextSibling",d)},prevUntil:function(a,b,d){return c.dir(a,"previousSibling",d)},siblings:function(a){return c.sibling(a.parentNode.firstChild,a)},children:function(a){return c.sibling(a.firstChild)},contents:function(a){return c.nodeName(a,"iframe")? +a.contentDocument||a.contentWindow.document:c.makeArray(a.childNodes)}},function(a,b){c.fn[a]=function(d,f){var e=c.map(this,b,d);eb.test(a)||(f=d);if(f&&typeof f==="string")e=c.filter(f,e);e=this.length>1?c.unique(e):e;if((this.length>1||gb.test(f))&&fb.test(a))e=e.reverse();return this.pushStack(e,a,R.call(arguments).join(","))}});c.extend({filter:function(a,b,d){if(d)a=":not("+a+")";return c.find.matches(a,b)},dir:function(a,b,d){var f=[];for(a=a[b];a&&a.nodeType!==9&&(d===w||a.nodeType!==1||!c(a).is(d));){a.nodeType=== +1&&f.push(a);a=a[b]}return f},nth:function(a,b,d){b=b||1;for(var f=0;a;a=a[d])if(a.nodeType===1&&++f===b)break;return a},sibling:function(a,b){for(var d=[];a;a=a.nextSibling)a.nodeType===1&&a!==b&&d.push(a);return d}});var Ja=/ jQuery\d+="(?:\d+|null)"/g,V=/^\s+/,Ka=/(<([\w:]+)[^>]*?)\/>/g,hb=/^(?:area|br|col|embed|hr|img|input|link|meta|param)$/i,La=/<([\w:]+)/,ib=/"},F={option:[1,""],legend:[1,"
","
"],thead:[1,"","
"],tr:[2,"","
"],td:[3,"","
"],col:[2,"","
"],area:[1,"",""],_default:[0,"",""]};F.optgroup=F.option;F.tbody=F.tfoot=F.colgroup=F.caption=F.thead;F.th=F.td;if(!c.support.htmlSerialize)F._default=[1,"div
","
"];c.fn.extend({text:function(a){if(c.isFunction(a))return this.each(function(b){var d= +c(this);d.text(a.call(this,b,d.text()))});if(typeof a!=="object"&&a!==w)return this.empty().append((this[0]&&this[0].ownerDocument||s).createTextNode(a));return c.text(this)},wrapAll:function(a){if(c.isFunction(a))return this.each(function(d){c(this).wrapAll(a.call(this,d))});if(this[0]){var b=c(a,this[0].ownerDocument).eq(0).clone(true);this[0].parentNode&&b.insertBefore(this[0]);b.map(function(){for(var d=this;d.firstChild&&d.firstChild.nodeType===1;)d=d.firstChild;return d}).append(this)}return this}, +wrapInner:function(a){if(c.isFunction(a))return this.each(function(b){c(this).wrapInner(a.call(this,b))});return this.each(function(){var b=c(this),d=b.contents();d.length?d.wrapAll(a):b.append(a)})},wrap:function(a){return this.each(function(){c(this).wrapAll(a)})},unwrap:function(){return this.parent().each(function(){c.nodeName(this,"body")||c(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.appendChild(a)})}, +prepend:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.insertBefore(a,this.firstChild)})},before:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,this)});else if(arguments.length){var a=c(arguments[0]);a.push.apply(a,this.toArray());return this.pushStack(a,"before",arguments)}},after:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b, +this.nextSibling)});else if(arguments.length){var a=this.pushStack(this,"after",arguments);a.push.apply(a,c(arguments[0]).toArray());return a}},remove:function(a,b){for(var d=0,f;(f=this[d])!=null;d++)if(!a||c.filter(a,[f]).length){if(!b&&f.nodeType===1){c.cleanData(f.getElementsByTagName("*"));c.cleanData([f])}f.parentNode&&f.parentNode.removeChild(f)}return 
this},empty:function(){for(var a=0,b;(b=this[a])!=null;a++)for(b.nodeType===1&&c.cleanData(b.getElementsByTagName("*"));b.firstChild;)b.removeChild(b.firstChild); +return this},clone:function(a){var b=this.map(function(){if(!c.support.noCloneEvent&&!c.isXMLDoc(this)){var d=this.outerHTML,f=this.ownerDocument;if(!d){d=f.createElement("div");d.appendChild(this.cloneNode(true));d=d.innerHTML}return c.clean([d.replace(Ja,"").replace(/=([^="'>\s]+\/)>/g,'="$1">').replace(V,"")],f)[0]}else return this.cloneNode(true)});if(a===true){ra(this,b);ra(this.find("*"),b.find("*"))}return b},html:function(a){if(a===w)return this[0]&&this[0].nodeType===1?this[0].innerHTML.replace(Ja, +""):null;else if(typeof a==="string"&&!ta.test(a)&&(c.support.leadingWhitespace||!V.test(a))&&!F[(La.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(Ka,Ma);try{for(var b=0,d=this.length;b0||e.cacheable||this.length>1?k.cloneNode(true):k)}o.length&&c.each(o,Qa)}return this}});c.fragments={};c.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){c.fn[a]=function(d){var f=[];d=c(d);var e=this.length===1&&this[0].parentNode;if(e&&e.nodeType===11&&e.childNodes.length===1&&d.length===1){d[b](this[0]); +return this}else{e=0;for(var j=d.length;e0?this.clone(true):this).get();c.fn[b].apply(c(d[e]),i);f=f.concat(i)}return this.pushStack(f,a,d.selector)}}});c.extend({clean:function(a,b,d,f){b=b||s;if(typeof b.createElement==="undefined")b=b.ownerDocument||b[0]&&b[0].ownerDocument||s;for(var e=[],j=0,i;(i=a[j])!=null;j++){if(typeof i==="number")i+="";if(i){if(typeof i==="string"&&!jb.test(i))i=b.createTextNode(i);else if(typeof i==="string"){i=i.replace(Ka,Ma);var o=(La.exec(i)||["", 
+""])[1].toLowerCase(),k=F[o]||F._default,n=k[0],r=b.createElement("div");for(r.innerHTML=k[1]+i+k[2];n--;)r=r.lastChild;if(!c.support.tbody){n=ib.test(i);o=o==="table"&&!n?r.firstChild&&r.firstChild.childNodes:k[1]===""&&!n?r.childNodes:[];for(k=o.length-1;k>=0;--k)c.nodeName(o[k],"tbody")&&!o[k].childNodes.length&&o[k].parentNode.removeChild(o[k])}!c.support.leadingWhitespace&&V.test(i)&&r.insertBefore(b.createTextNode(V.exec(i)[0]),r.firstChild);i=r.childNodes}if(i.nodeType)e.push(i);else e= +c.merge(e,i)}}if(d)for(j=0;e[j];j++)if(f&&c.nodeName(e[j],"script")&&(!e[j].type||e[j].type.toLowerCase()==="text/javascript"))f.push(e[j].parentNode?e[j].parentNode.removeChild(e[j]):e[j]);else{e[j].nodeType===1&&e.splice.apply(e,[j+1,0].concat(c.makeArray(e[j].getElementsByTagName("script"))));d.appendChild(e[j])}return e},cleanData:function(a){for(var b,d,f=c.cache,e=c.event.special,j=c.support.deleteExpando,i=0,o;(o=a[i])!=null;i++)if(d=o[c.expando]){b=f[d];if(b.events)for(var k in b.events)e[k]? 
+c.event.remove(o,k):Ca(o,k,b.handle);if(j)delete o[c.expando];else o.removeAttribute&&o.removeAttribute(c.expando);delete f[d]}}});var kb=/z-?index|font-?weight|opacity|zoom|line-?height/i,Na=/alpha\([^)]*\)/,Oa=/opacity=([^)]*)/,ha=/float/i,ia=/-([a-z])/ig,lb=/([A-Z])/g,mb=/^-?\d+(?:px)?$/i,nb=/^-?\d/,ob={position:"absolute",visibility:"hidden",display:"block"},pb=["Left","Right"],qb=["Top","Bottom"],rb=s.defaultView&&s.defaultView.getComputedStyle,Pa=c.support.cssFloat?"cssFloat":"styleFloat",ja= +function(a,b){return b.toUpperCase()};c.fn.css=function(a,b){return X(this,a,b,true,function(d,f,e){if(e===w)return c.curCSS(d,f);if(typeof e==="number"&&!kb.test(f))e+="px";c.style(d,f,e)})};c.extend({style:function(a,b,d){if(!a||a.nodeType===3||a.nodeType===8)return w;if((b==="width"||b==="height")&&parseFloat(d)<0)d=w;var f=a.style||a,e=d!==w;if(!c.support.opacity&&b==="opacity"){if(e){f.zoom=1;b=parseInt(d,10)+""==="NaN"?"":"alpha(opacity="+d*100+")";a=f.filter||c.curCSS(a,"filter")||"";f.filter= +Na.test(a)?a.replace(Na,b):b}return f.filter&&f.filter.indexOf("opacity=")>=0?parseFloat(Oa.exec(f.filter)[1])/100+"":""}if(ha.test(b))b=Pa;b=b.replace(ia,ja);if(e)f[b]=d;return f[b]},css:function(a,b,d,f){if(b==="width"||b==="height"){var e,j=b==="width"?pb:qb;function i(){e=b==="width"?a.offsetWidth:a.offsetHeight;f!=="border"&&c.each(j,function(){f||(e-=parseFloat(c.curCSS(a,"padding"+this,true))||0);if(f==="margin")e+=parseFloat(c.curCSS(a,"margin"+this,true))||0;else e-=parseFloat(c.curCSS(a, +"border"+this+"Width",true))||0})}a.offsetWidth!==0?i():c.swap(a,ob,i);return Math.max(0,Math.round(e))}return c.curCSS(a,b,d)},curCSS:function(a,b,d){var f,e=a.style;if(!c.support.opacity&&b==="opacity"&&a.currentStyle){f=Oa.test(a.currentStyle.filter||"")?parseFloat(RegExp.$1)/100+"":"";return f===""?"1":f}if(ha.test(b))b=Pa;if(!d&&e&&e[b])f=e[b];else if(rb){if(ha.test(b))b="float";b=b.replace(lb,"-$1").toLowerCase();e=a.ownerDocument.defaultView;if(!e)return 
null;if(a=e.getComputedStyle(a,null))f= +a.getPropertyValue(b);if(b==="opacity"&&f==="")f="1"}else if(a.currentStyle){d=b.replace(ia,ja);f=a.currentStyle[b]||a.currentStyle[d];if(!mb.test(f)&&nb.test(f)){b=e.left;var j=a.runtimeStyle.left;a.runtimeStyle.left=a.currentStyle.left;e.left=d==="fontSize"?"1em":f||0;f=e.pixelLeft+"px";e.left=b;a.runtimeStyle.left=j}}return f},swap:function(a,b,d){var f={};for(var e in b){f[e]=a.style[e];a.style[e]=b[e]}d.call(a);for(e in b)a.style[e]=f[e]}});if(c.expr&&c.expr.filters){c.expr.filters.hidden=function(a){var b= +a.offsetWidth,d=a.offsetHeight,f=a.nodeName.toLowerCase()==="tr";return b===0&&d===0&&!f?true:b>0&&d>0&&!f?false:c.curCSS(a,"display")==="none"};c.expr.filters.visible=function(a){return!c.expr.filters.hidden(a)}}var sb=J(),tb=//gi,ub=/select|textarea/i,vb=/color|date|datetime|email|hidden|month|number|password|range|search|tel|text|time|url|week/i,N=/=\?(&|$)/,ka=/\?/,wb=/(\?|&)_=.*?(&|$)/,xb=/^(\w+:)?\/\/([^\/?#]+)/,yb=/%20/g,zb=c.fn.load;c.fn.extend({load:function(a,b,d){if(typeof a!== +"string")return zb.call(this,a);else if(!this.length)return this;var f=a.indexOf(" ");if(f>=0){var e=a.slice(f,a.length);a=a.slice(0,f)}f="GET";if(b)if(c.isFunction(b)){d=b;b=null}else if(typeof b==="object"){b=c.param(b,c.ajaxSettings.traditional);f="POST"}var j=this;c.ajax({url:a,type:f,dataType:"html",data:b,complete:function(i,o){if(o==="success"||o==="notmodified")j.html(e?c("
").append(i.responseText.replace(tb,"")).find(e):i.responseText);d&&j.each(d,[i.responseText,o,i])}});return this}, +serialize:function(){return c.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?c.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||ub.test(this.nodeName)||vb.test(this.type))}).map(function(a,b){a=c(this).val();return a==null?null:c.isArray(a)?c.map(a,function(d){return{name:b.name,value:d}}):{name:b.name,value:a}}).get()}});c.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "), +function(a,b){c.fn[b]=function(d){return this.bind(b,d)}});c.extend({get:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b=null}return c.ajax({type:"GET",url:a,data:b,success:d,dataType:f})},getScript:function(a,b){return c.get(a,null,b,"script")},getJSON:function(a,b,d){return c.get(a,b,d,"json")},post:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b={}}return c.ajax({type:"POST",url:a,data:b,success:d,dataType:f})},ajaxSetup:function(a){c.extend(c.ajaxSettings,a)},ajaxSettings:{url:location.href, +global:true,type:"GET",contentType:"application/x-www-form-urlencoded",processData:true,async:true,xhr:A.XMLHttpRequest&&(A.location.protocol!=="file:"||!A.ActiveXObject)?function(){return new A.XMLHttpRequest}:function(){try{return new A.ActiveXObject("Microsoft.XMLHTTP")}catch(a){}},accepts:{xml:"application/xml, text/xml",html:"text/html",script:"text/javascript, application/javascript",json:"application/json, text/javascript",text:"text/plain",_default:"*/*"}},lastModified:{},etag:{},ajax:function(a){function b(){e.success&& +e.success.call(k,o,i,x);e.global&&f("ajaxSuccess",[x,e])}function d(){e.complete&&e.complete.call(k,x,i);e.global&&f("ajaxComplete",[x,e]);e.global&&!--c.active&&c.event.trigger("ajaxStop")}function f(q,p){(e.context?c(e.context):c.event).trigger(q,p)}var 
e=c.extend(true,{},c.ajaxSettings,a),j,i,o,k=a&&a.context||e,n=e.type.toUpperCase();if(e.data&&e.processData&&typeof e.data!=="string")e.data=c.param(e.data,e.traditional);if(e.dataType==="jsonp"){if(n==="GET")N.test(e.url)||(e.url+=(ka.test(e.url)? +"&":"?")+(e.jsonp||"callback")+"=?");else if(!e.data||!N.test(e.data))e.data=(e.data?e.data+"&":"")+(e.jsonp||"callback")+"=?";e.dataType="json"}if(e.dataType==="json"&&(e.data&&N.test(e.data)||N.test(e.url))){j=e.jsonpCallback||"jsonp"+sb++;if(e.data)e.data=(e.data+"").replace(N,"="+j+"$1");e.url=e.url.replace(N,"="+j+"$1");e.dataType="script";A[j]=A[j]||function(q){o=q;b();d();A[j]=w;try{delete A[j]}catch(p){}z&&z.removeChild(C)}}if(e.dataType==="script"&&e.cache===null)e.cache=false;if(e.cache=== +false&&n==="GET"){var r=J(),u=e.url.replace(wb,"$1_="+r+"$2");e.url=u+(u===e.url?(ka.test(e.url)?"&":"?")+"_="+r:"")}if(e.data&&n==="GET")e.url+=(ka.test(e.url)?"&":"?")+e.data;e.global&&!c.active++&&c.event.trigger("ajaxStart");r=(r=xb.exec(e.url))&&(r[1]&&r[1]!==location.protocol||r[2]!==location.host);if(e.dataType==="script"&&n==="GET"&&r){var z=s.getElementsByTagName("head")[0]||s.documentElement,C=s.createElement("script");C.src=e.url;if(e.scriptCharset)C.charset=e.scriptCharset;if(!j){var B= +false;C.onload=C.onreadystatechange=function(){if(!B&&(!this.readyState||this.readyState==="loaded"||this.readyState==="complete")){B=true;b();d();C.onload=C.onreadystatechange=null;z&&C.parentNode&&z.removeChild(C)}}}z.insertBefore(C,z.firstChild);return w}var E=false,x=e.xhr();if(x){e.username?x.open(n,e.url,e.async,e.username,e.password):x.open(n,e.url,e.async);try{if(e.data||a&&a.contentType)x.setRequestHeader("Content-Type",e.contentType);if(e.ifModified){c.lastModified[e.url]&&x.setRequestHeader("If-Modified-Since", 
+c.lastModified[e.url]);c.etag[e.url]&&x.setRequestHeader("If-None-Match",c.etag[e.url])}r||x.setRequestHeader("X-Requested-With","XMLHttpRequest");x.setRequestHeader("Accept",e.dataType&&e.accepts[e.dataType]?e.accepts[e.dataType]+", */*":e.accepts._default)}catch(ga){}if(e.beforeSend&&e.beforeSend.call(k,x,e)===false){e.global&&!--c.active&&c.event.trigger("ajaxStop");x.abort();return false}e.global&&f("ajaxSend",[x,e]);var g=x.onreadystatechange=function(q){if(!x||x.readyState===0||q==="abort"){E|| +d();E=true;if(x)x.onreadystatechange=c.noop}else if(!E&&x&&(x.readyState===4||q==="timeout")){E=true;x.onreadystatechange=c.noop;i=q==="timeout"?"timeout":!c.httpSuccess(x)?"error":e.ifModified&&c.httpNotModified(x,e.url)?"notmodified":"success";var p;if(i==="success")try{o=c.httpData(x,e.dataType,e)}catch(v){i="parsererror";p=v}if(i==="success"||i==="notmodified")j||b();else c.handleError(e,x,i,p);d();q==="timeout"&&x.abort();if(e.async)x=null}};try{var h=x.abort;x.abort=function(){x&&h.call(x); +g("abort")}}catch(l){}e.async&&e.timeout>0&&setTimeout(function(){x&&!E&&g("timeout")},e.timeout);try{x.send(n==="POST"||n==="PUT"||n==="DELETE"?e.data:null)}catch(m){c.handleError(e,x,null,m);d()}e.async||g();return x}},handleError:function(a,b,d,f){if(a.error)a.error.call(a.context||a,b,d,f);if(a.global)(a.context?c(a.context):c.event).trigger("ajaxError",[b,a,f])},active:0,httpSuccess:function(a){try{return!a.status&&location.protocol==="file:"||a.status>=200&&a.status<300||a.status===304||a.status=== +1223||a.status===0}catch(b){}return false},httpNotModified:function(a,b){var d=a.getResponseHeader("Last-Modified"),f=a.getResponseHeader("Etag");if(d)c.lastModified[b]=d;if(f)c.etag[b]=f;return a.status===304||a.status===0},httpData:function(a,b,d){var 
f=a.getResponseHeader("content-type")||"",e=b==="xml"||!b&&f.indexOf("xml")>=0;a=e?a.responseXML:a.responseText;e&&a.documentElement.nodeName==="parsererror"&&c.error("parsererror");if(d&&d.dataFilter)a=d.dataFilter(a,b);if(typeof a==="string")if(b=== +"json"||!b&&f.indexOf("json")>=0)a=c.parseJSON(a);else if(b==="script"||!b&&f.indexOf("javascript")>=0)c.globalEval(a);return a},param:function(a,b){function d(i,o){if(c.isArray(o))c.each(o,function(k,n){b||/\[\]$/.test(i)?f(i,n):d(i+"["+(typeof n==="object"||c.isArray(n)?k:"")+"]",n)});else!b&&o!=null&&typeof o==="object"?c.each(o,function(k,n){d(i+"["+k+"]",n)}):f(i,o)}function f(i,o){o=c.isFunction(o)?o():o;e[e.length]=encodeURIComponent(i)+"="+encodeURIComponent(o)}var e=[];if(b===w)b=c.ajaxSettings.traditional; +if(c.isArray(a)||a.jquery)c.each(a,function(){f(this.name,this.value)});else for(var j in a)d(j,a[j]);return e.join("&").replace(yb,"+")}});var la={},Ab=/toggle|show|hide/,Bb=/^([+-]=)?([\d+-.]+)(.*)$/,W,va=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]];c.fn.extend({show:function(a,b){if(a||a===0)return this.animate(K("show",3),a,b);else{a=0;for(b=this.length;a").appendTo("body");f=e.css("display");if(f==="none")f="block";e.remove();la[d]=f}c.data(this[a],"olddisplay",f)}}a=0;for(b=this.length;a=0;f--)if(d[f].elem===this){b&&d[f](true);d.splice(f,1)}});b||this.dequeue();return this}});c.each({slideDown:K("show",1),slideUp:K("hide",1),slideToggle:K("toggle",1),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"}},function(a,b){c.fn[a]=function(d,f){return this.animate(b,d,f)}});c.extend({speed:function(a,b,d){var f=a&&typeof a==="object"?a:{complete:d||!d&&b||c.isFunction(a)&&a,duration:a,easing:d&&b||b&&!c.isFunction(b)&&b};f.duration=c.fx.off?0:typeof f.duration=== 
+"number"?f.duration:c.fx.speeds[f.duration]||c.fx.speeds._default;f.old=f.complete;f.complete=function(){f.queue!==false&&c(this).dequeue();c.isFunction(f.old)&&f.old.call(this)};return f},easing:{linear:function(a,b,d,f){return d+f*a},swing:function(a,b,d,f){return(-Math.cos(a*Math.PI)/2+0.5)*f+d}},timers:[],fx:function(a,b,d){this.options=b;this.elem=a;this.prop=d;if(!b.orig)b.orig={}}});c.fx.prototype={update:function(){this.options.step&&this.options.step.call(this.elem,this.now,this);(c.fx.step[this.prop]|| +c.fx.step._default)(this);if((this.prop==="height"||this.prop==="width")&&this.elem.style)this.elem.style.display="block"},cur:function(a){if(this.elem[this.prop]!=null&&(!this.elem.style||this.elem.style[this.prop]==null))return this.elem[this.prop];return(a=parseFloat(c.css(this.elem,this.prop,a)))&&a>-10000?a:parseFloat(c.curCSS(this.elem,this.prop))||0},custom:function(a,b,d){function f(j){return e.step(j)}this.startTime=J();this.start=a;this.end=b;this.unit=d||this.unit||"px";this.now=this.start; +this.pos=this.state=0;var e=this;f.elem=this.elem;if(f()&&c.timers.push(f)&&!W)W=setInterval(c.fx.tick,13)},show:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.show=true;this.custom(this.prop==="width"||this.prop==="height"?1:0,this.cur());c(this.elem).show()},hide:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.hide=true;this.custom(this.cur(),0)},step:function(a){var b=J(),d=true;if(a||b>=this.options.duration+this.startTime){this.now= +this.end;this.pos=this.state=1;this.update();this.options.curAnim[this.prop]=true;for(var f in 
this.options.curAnim)if(this.options.curAnim[f]!==true)d=false;if(d){if(this.options.display!=null){this.elem.style.overflow=this.options.overflow;a=c.data(this.elem,"olddisplay");this.elem.style.display=a?a:this.options.display;if(c.css(this.elem,"display")==="none")this.elem.style.display="block"}this.options.hide&&c(this.elem).hide();if(this.options.hide||this.options.show)for(var e in this.options.curAnim)c.style(this.elem, +e,this.options.orig[e]);this.options.complete.call(this.elem)}return false}else{e=b-this.startTime;this.state=e/this.options.duration;a=this.options.easing||(c.easing.swing?"swing":"linear");this.pos=c.easing[this.options.specialEasing&&this.options.specialEasing[this.prop]||a](this.state,e,0,1,this.options.duration);this.now=this.start+(this.end-this.start)*this.pos;this.update()}return true}};c.extend(c.fx,{tick:function(){for(var a=c.timers,b=0;b
"; +a.insertBefore(b,a.firstChild);d=b.firstChild;f=d.firstChild;e=d.nextSibling.firstChild.firstChild;this.doesNotAddBorder=f.offsetTop!==5;this.doesAddBorderForTableAndCells=e.offsetTop===5;f.style.position="fixed";f.style.top="20px";this.supportsFixedPosition=f.offsetTop===20||f.offsetTop===15;f.style.position=f.style.top="";d.style.overflow="hidden";d.style.position="relative";this.subtractsBorderForOverflowNotVisible=f.offsetTop===-5;this.doesNotIncludeMarginInBodyOffset=a.offsetTop!==j;a.removeChild(b); +c.offset.initialize=c.noop},bodyOffset:function(a){var b=a.offsetTop,d=a.offsetLeft;c.offset.initialize();if(c.offset.doesNotIncludeMarginInBodyOffset){b+=parseFloat(c.curCSS(a,"marginTop",true))||0;d+=parseFloat(c.curCSS(a,"marginLeft",true))||0}return{top:b,left:d}},setOffset:function(a,b,d){if(/static/.test(c.curCSS(a,"position")))a.style.position="relative";var f=c(a),e=f.offset(),j=parseInt(c.curCSS(a,"top",true),10)||0,i=parseInt(c.curCSS(a,"left",true),10)||0;if(c.isFunction(b))b=b.call(a, +d,e);d={top:b.top-e.top+j,left:b.left-e.left+i};"using"in b?b.using.call(a,d):f.css(d)}};c.fn.extend({position:function(){if(!this[0])return null;var a=this[0],b=this.offsetParent(),d=this.offset(),f=/^body|html$/i.test(b[0].nodeName)?{top:0,left:0}:b.offset();d.top-=parseFloat(c.curCSS(a,"marginTop",true))||0;d.left-=parseFloat(c.curCSS(a,"marginLeft",true))||0;f.top+=parseFloat(c.curCSS(b[0],"borderTopWidth",true))||0;f.left+=parseFloat(c.curCSS(b[0],"borderLeftWidth",true))||0;return{top:d.top- +f.top,left:d.left-f.left}},offsetParent:function(){return this.map(function(){for(var a=this.offsetParent||s.body;a&&!/^body|html$/i.test(a.nodeName)&&c.css(a,"position")==="static";)a=a.offsetParent;return a})}});c.each(["Left","Top"],function(a,b){var d="scroll"+b;c.fn[d]=function(f){var e=this[0],j;if(!e)return null;if(f!==w)return this.each(function(){if(j=wa(this))j.scrollTo(!a?f:c(j).scrollLeft(),a?f:c(j).scrollTop());else this[d]=f});else 
return(j=wa(e))?"pageXOffset"in j?j[a?"pageYOffset": +"pageXOffset"]:c.support.boxModel&&j.document.documentElement[d]||j.document.body[d]:e[d]}});c.each(["Height","Width"],function(a,b){var d=b.toLowerCase();c.fn["inner"+b]=function(){return this[0]?c.css(this[0],d,false,"padding"):null};c.fn["outer"+b]=function(f){return this[0]?c.css(this[0],d,false,f?"margin":"border"):null};c.fn[d]=function(f){var e=this[0];if(!e)return f==null?null:this;if(c.isFunction(f))return this.each(function(j){var i=c(this);i[d](f.call(this,j,i[d]()))});return"scrollTo"in +e&&e.document?e.document.compatMode==="CSS1Compat"&&e.document.documentElement["client"+b]||e.document.body["client"+b]:e.nodeType===9?Math.max(e.documentElement["client"+b],e.body["scroll"+b],e.documentElement["scroll"+b],e.body["offset"+b],e.documentElement["offset"+b]):f===w?c.css(e,d):this.css(d,typeof f==="string"?f:f+"px")}});A.jQuery=A.$=c})(window); Binary files /tmp/DjhtJwK2X2/couchdb-1.2.0/share/doc/build/html/_static/logo.png and /tmp/T3VxxJYoFO/couchdb-1.4.0~rc.1/share/doc/build/html/_static/logo.png differ Binary files /tmp/DjhtJwK2X2/couchdb-1.2.0/share/doc/build/html/_static/minus.png and /tmp/T3VxxJYoFO/couchdb-1.4.0~rc.1/share/doc/build/html/_static/minus.png differ Binary files /tmp/DjhtJwK2X2/couchdb-1.2.0/share/doc/build/html/_static/plus.png and /tmp/T3VxxJYoFO/couchdb-1.4.0~rc.1/share/doc/build/html/_static/plus.png differ diff -Nru couchdb-1.2.0/share/doc/build/html/_static/pygments.css couchdb-1.4.0~rc.1/share/doc/build/html/_static/pygments.css --- couchdb-1.2.0/share/doc/build/html/_static/pygments.css 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/_static/pygments.css 2013-08-23 10:59:02.000000000 -0400 @@ -0,0 +1,62 @@ +.highlight .hll { background-color: #ffffcc } +.highlight { background: #eeffcc; } +.highlight .c { color: #408090; font-style: italic } /* Comment */ +.highlight .err { border: 1px solid #FF0000 } /* Error */ +.highlight .k { color: 
#007020; font-weight: bold } /* Keyword */ +.highlight .o { color: #666666 } /* Operator */ +.highlight .cm { color: #408090; font-style: italic } /* Comment.Multiline */ +.highlight .cp { color: #007020 } /* Comment.Preproc */ +.highlight .c1 { color: #408090; font-style: italic } /* Comment.Single */ +.highlight .cs { color: #408090; background-color: #fff0f0 } /* Comment.Special */ +.highlight .gd { color: #A00000 } /* Generic.Deleted */ +.highlight .ge { font-style: italic } /* Generic.Emph */ +.highlight .gr { color: #FF0000 } /* Generic.Error */ +.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ +.highlight .gi { color: #00A000 } /* Generic.Inserted */ +.highlight .go { color: #333333 } /* Generic.Output */ +.highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */ +.highlight .gs { font-weight: bold } /* Generic.Strong */ +.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ +.highlight .gt { color: #0044DD } /* Generic.Traceback */ +.highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */ +.highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */ +.highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */ +.highlight .kp { color: #007020 } /* Keyword.Pseudo */ +.highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */ +.highlight .kt { color: #902000 } /* Keyword.Type */ +.highlight .m { color: #208050 } /* Literal.Number */ +.highlight .s { color: #4070a0 } /* Literal.String */ +.highlight .na { color: #4070a0 } /* Name.Attribute */ +.highlight .nb { color: #007020 } /* Name.Builtin */ +.highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */ +.highlight .no { color: #60add5 } /* Name.Constant */ +.highlight .nd { color: #555555; font-weight: bold } /* Name.Decorator */ +.highlight .ni { color: #d55537; font-weight: bold } /* Name.Entity */ +.highlight .ne { color: #007020 } /* Name.Exception */ 
+.highlight .nf { color: #06287e } /* Name.Function */ +.highlight .nl { color: #002070; font-weight: bold } /* Name.Label */ +.highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */ +.highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */ +.highlight .nv { color: #bb60d5 } /* Name.Variable */ +.highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */ +.highlight .w { color: #bbbbbb } /* Text.Whitespace */ +.highlight .mf { color: #208050 } /* Literal.Number.Float */ +.highlight .mh { color: #208050 } /* Literal.Number.Hex */ +.highlight .mi { color: #208050 } /* Literal.Number.Integer */ +.highlight .mo { color: #208050 } /* Literal.Number.Oct */ +.highlight .sb { color: #4070a0 } /* Literal.String.Backtick */ +.highlight .sc { color: #4070a0 } /* Literal.String.Char */ +.highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */ +.highlight .s2 { color: #4070a0 } /* Literal.String.Double */ +.highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */ +.highlight .sh { color: #4070a0 } /* Literal.String.Heredoc */ +.highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */ +.highlight .sx { color: #c65d09 } /* Literal.String.Other */ +.highlight .sr { color: #235388 } /* Literal.String.Regex */ +.highlight .s1 { color: #4070a0 } /* Literal.String.Single */ +.highlight .ss { color: #517918 } /* Literal.String.Symbol */ +.highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */ +.highlight .vc { color: #bb60d5 } /* Name.Variable.Class */ +.highlight .vg { color: #bb60d5 } /* Name.Variable.Global */ +.highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */ +.highlight .il { color: #208050 } /* Literal.Number.Integer.Long */ \ No newline at end of file diff -Nru couchdb-1.2.0/share/doc/build/html/_static/rtd.css couchdb-1.4.0~rc.1/share/doc/build/html/_static/rtd.css --- couchdb-1.2.0/share/doc/build/html/_static/rtd.css 1969-12-31 19:00:00.000000000 
-0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/_static/rtd.css 2013-08-23 10:57:21.000000000 -0400 @@ -0,0 +1,795 @@ +/* + * rtd.css + * ~~~~~~~~~~~~~~~ + * + * Sphinx stylesheet -- sphinxdoc theme. Originally created by + * Armin Ronacher for Werkzeug. + * + * Customized for ReadTheDocs by Eric Pierce & Eric Holscher + * + * :copyright: Copyright 2007-2010 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +/* RTD colors + * light blue: #e8ecef + * medium blue: #8ca1af + * dark blue: #465158 + * dark grey: #444444 + * + * white hover: #d1d9df; + * medium blue hover: #697983; + * green highlight: #8ecc4c + * light blue (project bar): #e8ecef + */ + +@import url("basic.css"); + +/* PAGE LAYOUT -------------------------------------------------------------- */ + +body { + font: 100%/1.5 "ff-meta-web-pro-1","ff-meta-web-pro-2",Arial,"Helvetica Neue",sans-serif; + text-align: center; + color: black; + background-color: #465158; + padding: 0; + margin: 0; +} + +div.document { + text-align: left; + background-color: #e8ecef; +} + +div.bodywrapper { + background-color: #ffffff; + border-left: 1px solid #ccc; + border-bottom: 1px solid #ccc; + margin: 0 0 0 16em; +} + +div.body { + margin: 0; + padding: 0.5em 1.3em; + min-width: 20em; +} + +div.related { + font-size: 1em; + background-color: #465158; +} + +div.documentwrapper { + float: left; + width: 100%; + background-color: #e8ecef; +} + + +/* HEADINGS --------------------------------------------------------------- */ + +h1 { + margin: 0; + padding: 0.7em 0 0.3em 0; + font-size: 1.5em; + line-height: 1.15; + color: #111; + clear: both; +} + +h2 { + margin: 2em 0 0.2em 0; + font-size: 1.35em; + padding: 0; + color: #465158; +} + +h3 { + margin: 1em 0 -0.3em 0; + font-size: 1.2em; + color: #6c818f; +} + +div.body h1 a, div.body h2 a, div.body h3 a, div.body h4 a, div.body h5 a, div.body h6 a { + color: black; +} + +h1 a.anchor, h2 a.anchor, h3 a.anchor, h4 a.anchor, h5 a.anchor, h6 
a.anchor { + display: none; + margin: 0 0 0 0.3em; + padding: 0 0.2em 0 0.2em; + color: #aaa !important; +} + +h1:hover a.anchor, h2:hover a.anchor, h3:hover a.anchor, h4:hover a.anchor, +h5:hover a.anchor, h6:hover a.anchor { + display: inline; +} + +h1 a.anchor:hover, h2 a.anchor:hover, h3 a.anchor:hover, h4 a.anchor:hover, +h5 a.anchor:hover, h6 a.anchor:hover { + color: #777; + background-color: #eee; +} + + +/* LINKS ------------------------------------------------------------------ */ + +/* Normal links get a pseudo-underline */ +a { + color: #444; + text-decoration: none; + border-bottom: 1px solid #ccc; +} + +/* Links in sidebar, TOC, index trees and tables have no underline */ +.sphinxsidebar a, +.toctree-wrapper a, +.indextable a, +#indices-and-tables a { + color: #444; + text-decoration: none; + border-bottom: none; +} + +/* Most links get an underline-effect when hovered */ +a:hover, +div.toctree-wrapper a:hover, +.indextable a:hover, +#indices-and-tables a:hover { + color: #111; + text-decoration: none; + border-bottom: 1px solid #111; +} + +/* Footer links */ +div.footer a { + color: #86989B; + text-decoration: none; + border: none; +} +div.footer a:hover { + color: #a6b8bb; + text-decoration: underline; + border: none; +} + +/* Permalink anchor (subtle grey with a red hover) */ +div.body a.headerlink { + color: #ccc; + font-size: 1em; + margin-left: 6px; + padding: 0 4px 0 4px; + text-decoration: none; + border: none; +} +div.body a.headerlink:hover { + color: #c60f0f; + border: none; +} + + +/* NAVIGATION BAR --------------------------------------------------------- */ + +div.related ul { + height: 2.5em; +} + +div.related ul li { + margin: 0; + padding: 0.65em 0; + float: left; + display: block; + color: white; /* For the >> separators */ + font-size: 0.8em; +} + +div.related ul li.right { + float: right; + margin-right: 5px; + color: transparent; /* Hide the | separators */ +} + +/* "Breadcrumb" links in nav bar */ +div.related ul li a { + order: 
none; + background-color: inherit; + font-weight: bold; + margin: 6px 0 6px 4px; + line-height: 1.75em; + color: #ffffff; + padding: 0.4em 0.8em; + border: none; + border-radius: 3px; +} +/* previous / next / modules / index links look more like buttons */ +div.related ul li.right a { + margin: 0.375em 0; + background-color: #697983; + text-shadow: 0 1px rgba(0, 0, 0, 0.5); + border-radius: 3px; + -webkit-border-radius: 3px; + -moz-border-radius: 3px; +} +/* All navbar links light up as buttons when hovered */ +div.related ul li a:hover { + background-color: #8ca1af; + color: #ffffff; + text-decoration: none; + border-radius: 3px; + -webkit-border-radius: 3px; + -moz-border-radius: 3px; +} +/* Take extra precautions for tt within links */ +a tt, +div.related ul li a tt { + background: inherit !important; + color: inherit !important; +} + + +/* SIDEBAR ---------------------------------------------------------------- */ + +div.sphinxsidebarwrapper { + padding: 0; +} + +div.sphinxsidebar { + margin: 0; + margin-left: -100%; + float: left; + top: 3em; + left: 0; + padding: 0 1em; + width: 14em; + font-size: 1em; + text-align: left; + background-color: #e8ecef; +} + +div.sphinxsidebar img { + max-width: 12em; +} + +div.sphinxsidebar h3, +div.sphinxsidebar h4, +div.sphinxsidebar p.logo { + margin: 1.2em 0 0.3em 0; + font-size: 1em; + padding: 0; + color: #222222; + font-family: "ff-meta-web-pro-1", "ff-meta-web-pro-2", "Arial", "Helvetica Neue", sans-serif; +} + +div.sphinxsidebar h3 a { + color: #444444; +} + +div.sphinxsidebar ul, +div.sphinxsidebar p { + margin-top: 0; + padding-left: 0; + line-height: 130%; + background-color: #e8ecef; +} + +/* No bullets for nested lists, but a little extra indentation */ +div.sphinxsidebar ul ul { + list-style-type: none; + margin-left: 1.5em; + padding: 0; +} + +/* A little top/bottom padding to prevent adjacent links' borders + * from overlapping each other */ +div.sphinxsidebar ul li { + padding: 1px 0; +} + +/* A little 
left-padding to make these align with the ULs */ +div.sphinxsidebar p.topless { + padding-left: 0 0 0 1em; +} + +/* Make these into hidden one-liners */ +div.sphinxsidebar ul li, +div.sphinxsidebar p.topless { + white-space: nowrap; + overflow: hidden; +} +/* ...which become visible when hovered */ +div.sphinxsidebar ul li:hover, +div.sphinxsidebar p.topless:hover { + overflow: visible; +} + +/* Search text box and "Go" button */ +#searchbox { + margin-top: 2em; + margin-bottom: 1em; + background: #ddd; + padding: 0.5em; + border-radius: 6px; + -moz-border-radius: 6px; + -webkit-border-radius: 6px; +} +#searchbox h3 { + margin-top: 0; +} + +/* Make search box and button abut and have a border */ +input, +div.sphinxsidebar input { + border: 1px solid #999; + float: left; +} + +/* Search textbox */ +input[type="text"] { + margin: 0; + padding: 0 3px; + height: 20px; + width: 144px; + border-top-left-radius: 3px; + border-bottom-left-radius: 3px; + -moz-border-radius-topleft: 3px; + -moz-border-radius-bottomleft: 3px; + -webkit-border-top-left-radius: 3px; + -webkit-border-bottom-left-radius: 3px; +} +/* Search button */ +input[type="submit"] { + margin: 0 0 0 -1px; /* -1px prevents a double-border with textbox */ + height: 22px; + color: #444; + background-color: #e8ecef; + padding: 1px 4px; + font-weight: bold; + border-top-right-radius: 3px; + border-bottom-right-radius: 3px; + -moz-border-radius-topright: 3px; + -moz-border-radius-bottomright: 3px; + -webkit-border-top-right-radius: 3px; + -webkit-border-bottom-right-radius: 3px; +} +input[type="submit"]:hover { + color: #ffffff; + background-color: #8ecc4c; +} + +div.sphinxsidebar p.searchtip { + clear: both; + padding: 0.5em 0 0 0; + background: #ddd; + color: #666; + font-size: 0.9em; +} + +/* Sidebar links are unusual */ +div.sphinxsidebar li a, +div.sphinxsidebar p a { + background: #e8ecef; /* In case links overlap main content */ + border-radius: 3px; + -moz-border-radius: 3px; + -webkit-border-radius: 3px; 
+ border: 1px solid transparent; /* To prevent things jumping around on hover */ + padding: 0 5px 0 5px; +} +div.sphinxsidebar li a:hover, +div.sphinxsidebar p a:hover { + color: #111; + text-decoration: none; + border: 1px solid #888; +} +div.sphinxsidebar p.logo a { + border: 0; +} + +/* Tweak any link appearing in a heading */ +div.sphinxsidebar h3 a { +} + + + + +/* OTHER STUFF ------------------------------------------------------------ */ + +cite, code, tt { + font-family: 'Consolas', 'Deja Vu Sans Mono', + 'Bitstream Vera Sans Mono', monospace; + font-size: 0.95em; + letter-spacing: 0.01em; +} + +tt { + background-color: #f2f2f2; + color: #444; +} + +tt.descname, tt.descclassname, tt.xref { + border: 0; +} + +hr { + border: 1px solid #abc; + margin: 2em; +} + + +pre, #_fontwidthtest { + font-family: 'Consolas', 'Deja Vu Sans Mono', + 'Bitstream Vera Sans Mono', monospace; + margin: 1em 2em; + font-size: 0.95em; + letter-spacing: 0.015em; + line-height: 120%; + padding: 0.5em; + border: 1px solid #ccc; + background-color: #eee; + border-radius: 6px; + -moz-border-radius: 6px; + -webkit-border-radius: 6px; +} + +pre a { + color: inherit; + text-decoration: underline; +} + +td.linenos pre { + margin: 1em 0em; +} + +td.code pre { + margin: 1em 0em; +} + +div.quotebar { + background-color: #f8f8f8; + max-width: 250px; + float: right; + padding: 2px 7px; + border: 1px solid #ccc; +} + +div.topic { + background-color: #f8f8f8; +} + +table { + border-collapse: collapse; + margin: 0 -0.5em 0 -0.5em; +} + +table td, table th { + padding: 0.2em 0.5em 0.2em 0.5em; +} + + +/* ADMONITIONS AND WARNINGS ------------------------------------------------- */ + +/* Shared by admonitions, warnings and sidebars */ +div.admonition, +div.warning, +div.sidebar { + font-size: 0.9em; + margin: 2em; + padding: 0; + /* + border-radius: 6px; + -moz-border-radius: 6px; + -webkit-border-radius: 6px; + */ +} +div.admonition p, +div.warning p, +div.sidebar p { + margin: 0.5em 1em 0.5em 1em; 
+ padding: 0; +} +div.admonition pre, +div.warning pre, +div.sidebar pre { + margin: 0.4em 1em 0.4em 1em; +} +div.admonition p.admonition-title, +div.warning p.admonition-title, +div.sidebar p.sidebar-title { + margin: 0; + padding: 0.1em 0 0.1em 0.5em; + color: white; + font-weight: bold; + font-size: 1.1em; + text-shadow: 0 1px rgba(0, 0, 0, 0.5); +} +div.admonition ul, div.admonition ol, +div.warning ul, div.warning ol, +div.sidebar ul, div.sidebar ol { + margin: 0.1em 0.5em 0.5em 3em; + padding: 0; +} + + +/* Admonitions and sidebars only */ +div.admonition, div.sidebar { + border: 1px solid #609060; + background-color: #e9ffe9; +} +div.admonition p.admonition-title, +div.sidebar p.sidebar-title { + background-color: #70A070; + border-bottom: 1px solid #609060; +} + + +/* Warnings only */ +div.warning { + border: 1px solid #900000; + background-color: #ffe9e9; +} +div.warning p.admonition-title { + background-color: #b04040; + border-bottom: 1px solid #900000; +} + + +/* Sidebars only */ +div.sidebar { + max-width: 30%; +} + + + +div.versioninfo { + margin: 1em 0 0 0; + border: 1px solid #ccc; + background-color: #DDEAF0; + padding: 8px; + line-height: 1.3em; + font-size: 0.9em; +} + +.viewcode-back { + font-family: 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva', + 'Verdana', sans-serif; +} + +div.viewcode-block:target { + background-color: #f4debf; + border-top: 1px solid #ac9; + border-bottom: 1px solid #ac9; +} + +dl { + margin: 1em 0 2.5em 0; +} + +/* Highlight target when you click an internal link */ +dt:target { + background: #ffe080; +} +/* Don't highlight whole divs */ +div.highlight { + background: transparent; +} +/* But do highlight spans (so search results can be highlighted) */ +span.highlight { + background: #ffe080; +} + +div.footer { + background-color: #465158; + color: #eeeeee; + padding: 0 2em 2em 2em; + clear: both; + font-size: 0.8em; + text-align: center; +} + +p { + margin: 0.8em 0 0.5em 0; +} + +.section p img.math { + margin: 0; +} + 
+ +.section p img { + margin: 1em 2em; +} + + +/* MOBILE LAYOUT -------------------------------------------------------------- */ + +@media screen and (max-width: 600px) { + + h1, h2, h3, h4, h5 { + position: relative; + } + + ul { + padding-left: 1.25em; + } + + div.bodywrapper a.headerlink, #indices-and-tables h1 a { + color: #e6e6e6; + font-size: 80%; + float: right; + line-height: 1.8; + position: absolute; + right: -0.7em; + visibility: inherit; + } + + div.bodywrapper h1 a.headerlink, #indices-and-tables h1 a { + line-height: 1.5; + } + + pre { + font-size: 0.7em; + overflow: auto; + word-wrap: break-word; + white-space: pre-wrap; + } + + div.related ul { + height: 2.5em; + padding: 0; + text-align: left; + } + + div.related ul li { + clear: both; + color: #465158; + padding: 0.2em 0; + } + + div.related ul li:last-child { + border-bottom: 1px dotted #8ca1af; + padding-bottom: 0.4em; + margin-bottom: 1em; + width: 100%; + } + + div.related ul li a { + color: #465158; + padding-right: 0; + } + + div.related ul li a:hover { + background: inherit; + color: inherit; + } + + div.related ul li.right { + clear: none; + padding: 0.65em 0; + margin-bottom: 0.5em; + } + + div.related ul li.right a { + color: #fff; + padding-right: 0.8em; + } + + div.related ul li.right a:hover { + background-color: #8ca1af; + } + + div.body { + clear: both; + min-width: 0; + word-wrap: break-word; + } + + div.bodywrapper { + margin: 0 0 0 0; + } + + div.sphinxsidebar { + float: none; + margin: 0; + width: auto; + } + + div.sphinxsidebar input[type="text"] { + height: 2em; + line-height: 2em; + width: 70%; + } + + div.sphinxsidebar input[type="submit"] { + height: 2em; + margin-left: 0.5em; + width: 20%; + } + + div.sphinxsidebar p.searchtip { + background: inherit; + margin-bottom: 1em; + } + + div.sphinxsidebar ul li, div.sphinxsidebar p.topless { + white-space: normal; + } + + .bodywrapper img { + display: block; + margin-left: auto; + margin-right: auto; + max-width: 100%; + } + + 
div.documentwrapper { + float: none; + } + + div.admonition, div.warning, pre, blockquote { + margin-left: 0em; + margin-right: 0em; + } + + .body p img { + margin: 0; + } + + #searchbox { + background: transparent; + } + + .related:not(:first-child) li { + display: none; + } + + .related:not(:first-child) li.right { + display: block; + } + + div.footer { + padding: 1em; + } + + .rtd_doc_footer .rtd-badge { + float: none; + margin: 1em auto; + position: static; + } + + .rtd_doc_footer .rtd-badge.revsys-inline { + margin-right: auto; + margin-bottom: 2em; + } + + table.indextable { + display: block; + width: auto; + } + + .indextable tr { + display: block; + } + + .indextable td { + display: block; + padding: 0; + width: auto !important; + } + + .indextable td dt { + margin: 1em 0; + } + + ul.search { + margin-left: 0.25em; + } + + ul.search li div.context { + font-size: 90%; + line-height: 1.1; + margin-bottom: 1; + margin-left: 0; + } + +} diff -Nru couchdb-1.2.0/share/doc/build/html/_static/searchtools.js couchdb-1.4.0~rc.1/share/doc/build/html/_static/searchtools.js --- couchdb-1.2.0/share/doc/build/html/_static/searchtools.js 1969-12-31 19:00:00.000000000 -0500 +++ couchdb-1.4.0~rc.1/share/doc/build/html/_static/searchtools.js 2013-08-23 10:59:02.000000000 -0400 @@ -0,0 +1,560 @@ +/* + * searchtools.js_t + * ~~~~~~~~~~~~~~~~ + * + * Sphinx JavaScript utilties for the full-text search. + * + * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +/** + * helper function to return a node containing the + * search summary for a given text. keywords is a list + * of stemmed words, hlwords is the list of normal, unstemmed + * words. the first one is used to find the occurance, the + * latter for highlighting it. 
+ */ + +jQuery.makeSearchSummary = function(text, keywords, hlwords) { + var textLower = text.toLowerCase(); + var start = 0; + $.each(keywords, function() { + var i = textLower.indexOf(this.toLowerCase()); + if (i > -1) + start = i; + }); + start = Math.max(start - 120, 0); + var excerpt = ((start > 0) ? '...' : '') + + $.trim(text.substr(start, 240)) + + ((start + 240 - text.length) ? '...' : ''); + var rv = $('
').text(excerpt); + $.each(hlwords, function() { + rv = rv.highlightText(this, 'highlighted'); + }); + return rv; +} + + +/** + * Porter Stemmer + */ +var Stemmer = function() { + + var step2list = { + ational: 'ate', + tional: 'tion', + enci: 'ence', + anci: 'ance', + izer: 'ize', + bli: 'ble', + alli: 'al', + entli: 'ent', + eli: 'e', + ousli: 'ous', + ization: 'ize', + ation: 'ate', + ator: 'ate', + alism: 'al', + iveness: 'ive', + fulness: 'ful', + ousness: 'ous', + aliti: 'al', + iviti: 'ive', + biliti: 'ble', + logi: 'log' + }; + + var step3list = { + icate: 'ic', + ative: '', + alize: 'al', + iciti: 'ic', + ical: 'ic', + ful: '', + ness: '' + }; + + var c = "[^aeiou]"; // consonant + var v = "[aeiouy]"; // vowel + var C = c + "[^aeiouy]*"; // consonant sequence + var V = v + "[aeiou]*"; // vowel sequence + + var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0 + var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 + var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 + var s_v = "^(" + C + ")?" 
+ v; // vowel in stem + + this.stemWord = function (w) { + var stem; + var suffix; + var firstch; + var origword = w; + + if (w.length < 3) + return w; + + var re; + var re2; + var re3; + var re4; + + firstch = w.substr(0,1); + if (firstch == "y") + w = firstch.toUpperCase() + w.substr(1); + + // Step 1a + re = /^(.+?)(ss|i)es$/; + re2 = /^(.+?)([^s])s$/; + + if (re.test(w)) + w = w.replace(re,"$1$2"); + else if (re2.test(w)) + w = w.replace(re2,"$1$2"); + + // Step 1b + re = /^(.+?)eed$/; + re2 = /^(.+?)(ed|ing)$/; + if (re.test(w)) { + var fp = re.exec(w); + re = new RegExp(mgr0); + if (re.test(fp[1])) { + re = /.$/; + w = w.replace(re,""); + } + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1]; + re2 = new RegExp(s_v); + if (re2.test(stem)) { + w = stem; + re2 = /(at|bl|iz)$/; + re3 = new RegExp("([^aeiouylsz])\\1$"); + re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re2.test(w)) + w = w + "e"; + else if (re3.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + else if (re4.test(w)) + w = w + "e"; + } + } + + // Step 1c + re = /^(.+?)y$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(s_v); + if (re.test(stem)) + w = stem + "i"; + } + + // Step 2 + re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step2list[suffix]; + } + + // Step 3 + re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + suffix = fp[2]; + re = new RegExp(mgr0); + if (re.test(stem)) + w = stem + step3list[suffix]; + } + + // Step 4 + re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; + re2 = /^(.+?)(s|t)(ion)$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + if 
(re.test(stem)) + w = stem; + } + else if (re2.test(w)) { + var fp = re2.exec(w); + stem = fp[1] + fp[2]; + re2 = new RegExp(mgr1); + if (re2.test(stem)) + w = stem; + } + + // Step 5 + re = /^(.+?)e$/; + if (re.test(w)) { + var fp = re.exec(w); + stem = fp[1]; + re = new RegExp(mgr1); + re2 = new RegExp(meq1); + re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); + if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) + w = stem; + } + re = /ll$/; + re2 = new RegExp(mgr1); + if (re.test(w) && re2.test(w)) { + re = /.$/; + w = w.replace(re,""); + } + + // and turn initial Y back to y + if (firstch == "y") + w = firstch.toLowerCase() + w.substr(1); + return w; + } +} + + +/** + * Search Module + */ +var Search = { + + _index : null, + _queued_query : null, + _pulse_status : -1, + + init : function() { + var params = $.getQueryParameters(); + if (params.q) { + var query = params.q[0]; + $('input[name="q"]')[0].value = query; + this.performSearch(query); + } + }, + + loadIndex : function(url) { + $.ajax({type: "GET", url: url, data: null, success: null, + dataType: "script", cache: true}); + }, + + setIndex : function(index) { + var q; + this._index = index; + if ((q = this._queued_query) !== null) { + this._queued_query = null; + Search.query(q); + } + }, + + hasIndex : function() { + return this._index !== null; + }, + + deferQuery : function(query) { + this._queued_query = query; + }, + + stopPulse : function() { + this._pulse_status = 0; + }, + + startPulse : function() { + if (this._pulse_status >= 0) + return; + function pulse() { + Search._pulse_status = (Search._pulse_status + 1) % 4; + var dotString = ''; + for (var i = 0; i < Search._pulse_status; i++) + dotString += '.'; + Search.dots.text(dotString); + if (Search._pulse_status > -1) + window.setTimeout(pulse, 500); + }; + pulse(); + }, + + /** + * perform a search for something + */ + performSearch : function(query) { + // create the required interface elements + this.out = $('#search-results'); + 
this.title = $('

' + _('Searching') + '

').appendTo(this.out); + this.dots = $('').appendTo(this.title); + this.status = $('

').appendTo(this.out); + this.output = $('