More eol cleanups

git-svn-id: https://unknownworlds.svn.cloudforge.com/ns1@6 67975925-1194-0748-b3d5-c16f83f1a3a1
This commit is contained in:
puzl 2005-03-10 13:16:40 +00:00
parent 1a2b6e1947
commit 48e8eda3ed
7 changed files with 3893 additions and 3313 deletions

View file

@ -1,76 +1,664 @@
#############################################################################
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
# Makefile.in generated by automake 1.7.2 from Makefile.am.
# Makefile. Generated from Makefile.in by configure.
# Copyright 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002
# Free Software Foundation, Inc.
# This Makefile.in is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE.
#
# Copyright (C) 2002, Daniel Stenberg, <daniel@haxx.se>, et al.
# $Id: Makefile.am,v 1.41 2003/03/31 11:37:47 bagder Exp $
#
# In order to be useful for every potential user, curl and libcurl are
# dual-licensed under the MPL and the MIT/X-derivate licenses.
srcdir = .
top_srcdir = .
pkgdatadir = $(datadir)/curl
pkglibdir = $(libdir)/curl
pkgincludedir = $(includedir)/curl
top_builddir = .
am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
INSTALL = /usr/bin/install -c
install_sh_DATA = $(install_sh) -c -m 644
install_sh_PROGRAM = $(install_sh) -c
install_sh_SCRIPT = $(install_sh) -c
INSTALL_HEADER = $(INSTALL_DATA)
transform = $(program_transform_name)
NORMAL_INSTALL = :
PRE_INSTALL = :
POST_INSTALL = :
NORMAL_UNINSTALL = :
PRE_UNINSTALL = :
POST_UNINSTALL = :
host_triplet = i686-pc-linux-gnu
ACLOCAL = ${SHELL} /home/hjw/ns/source/curl/missing --run aclocal-1.7
AMDEP_FALSE = #
AMDEP_TRUE =
AMTAR = ${SHELL} /home/hjw/ns/source/curl/missing --run tar
AR = ar
AS = as
AUTOCONF = ${SHELL} /home/hjw/ns/source/curl/missing --run autoconf
AUTOHEADER = ${SHELL} /home/hjw/ns/source/curl/missing --run autoheader
AUTOMAKE = ${SHELL} /home/hjw/ns/source/curl/missing --run automake-1.7
AWK = gawk
CC = gcc
CCDEPMODE = depmode=gcc3
CFLAGS = -g -O2
CPP = gcc -E
CPPFLAGS =
CURL_CA_BUNDLE =
CURL_DISABLE_DICT =
CURL_DISABLE_FILE =
CURL_DISABLE_FTP =
CURL_DISABLE_GOPHER =
CURL_DISABLE_HTTP =
CURL_DISABLE_LDAP =
CURL_DISABLE_TELNET =
CXX = g++
CXXCPP = g++ -E
CXXDEPMODE = depmode=gcc3
CXXFLAGS = -g -O2
CYGPATH_W = echo
DEFS = -DHAVE_CONFIG_H
DEPDIR = .deps
DLLTOOL = dlltool
ECHO = echo
ECHO_C =
ECHO_N = -n
ECHO_T =
EGREP = grep -E
EXEEXT =
F77 = g77
FFLAGS = -g -O2
HAVE_LIBZ =
INSTALL_DATA = ${INSTALL} -m 644
INSTALL_PROGRAM = ${INSTALL}
INSTALL_SCRIPT = ${INSTALL}
INSTALL_STRIP_PROGRAM = ${SHELL} $(install_sh) -c -s
IPV6_ENABLED =
KRB4_ENABLED =
LDFLAGS =
LIBOBJS =
LIBS = -ldl
LIBTOOL = $(SHELL) $(top_builddir)/libtool
LN_S = ln -s
LTLIBOBJS =
MAINT = #
MAINTAINER_MODE_FALSE =
MAINTAINER_MODE_TRUE = #
MAKEINFO = ${SHELL} /home/hjw/ns/source/curl/missing --run makeinfo
NO_UNDEFINED_FALSE =
NO_UNDEFINED_TRUE = #
NROFF = /usr/bin/gnroff
OBJDUMP = objdump
OBJEXT = o
OPENSSL_ENABLED =
PACKAGE = curl
PACKAGE_BUGREPORT = curl-bug@haxx.se
PACKAGE_NAME = curl
PACKAGE_STRING = curl -
PACKAGE_TARNAME = curl
PACKAGE_VERSION = -
PATH_SEPARATOR = :
PERL = /usr/bin/perl
PKGADD_NAME = cURL - a client that groks URLs
PKGADD_PKG = HAXXcurl
PKGADD_VENDOR = curl.haxx.se
PKGCONFIG = /usr/bin/pkg-config
RANDOM_FILE = /dev/urandom
RANLIB = ranlib
SED = /bin/sed
SET_MAKE =
SHELL = /bin/sh
STRIP = strip
VERSION = 7.10.5
VERSIONNUM = 070a05
YACC = bison -y
ac_ct_AR = ar
ac_ct_AS =
ac_ct_CC = gcc
ac_ct_CXX = g++
ac_ct_DLLTOOL =
ac_ct_F77 = g77
ac_ct_OBJDUMP =
ac_ct_RANLIB = ranlib
ac_ct_STRIP = strip
am__fastdepCC_FALSE = #
am__fastdepCC_TRUE =
am__fastdepCXX_FALSE = #
am__fastdepCXX_TRUE =
am__include = include
am__quote =
bindir = ${exec_prefix}/bin
build = i686-pc-linux-gnu
build_alias = i686-linux
build_cpu = i686
build_os = linux-gnu
build_vendor = pc
datadir = ${prefix}/share
exec_prefix = ${prefix}
host = i686-pc-linux-gnu
host_alias =
host_cpu = i686
host_os = linux-gnu
host_vendor = pc
includedir = ${prefix}/include
infodir = ${prefix}/info
install_sh = /home/hjw/ns/source/curl/install-sh
libdir = ${exec_prefix}/lib
libexecdir = ${exec_prefix}/libexec
localstatedir = ${prefix}/var
mandir = ${prefix}/man
oldincludedir = /usr/include
prefix = /usr/local
program_transform_name = s,x,x,
sbindir = ${exec_prefix}/sbin
sharedstatedir = ${prefix}/com
sysconfdir = ${prefix}/etc
target_alias =
AUTOMAKE_OPTIONS = foreign
EXTRA_DIST = CHANGES COPYING maketgz SSLCERTS reconf Makefile.dist \
curl-config.in build_vms.com curl-style.el sample.emacs testcurl.sh
bin_SCRIPTS = curl-config
SUBDIRS = docs lib src include tests packages
subdir = .
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
mkinstalldirs = $(SHELL) $(top_srcdir)/mkinstalldirs
CONFIG_HEADER = $(top_builddir)/lib/config.h \
$(top_builddir)/src/config.h \
$(top_builddir)/tests/server/config.h \
$(top_builddir)/lib/ca-bundle.h
CONFIG_CLEAN_FILES = curl-config
SCRIPTS = $(bin_SCRIPTS)
DIST_SOURCES =
RECURSIVE_TARGETS = info-recursive dvi-recursive pdf-recursive \
ps-recursive install-info-recursive uninstall-info-recursive \
all-recursive install-data-recursive install-exec-recursive \
installdirs-recursive install-recursive uninstall-recursive \
check-recursive installcheck-recursive
DIST_COMMON = README COPYING Makefile.am Makefile.in acinclude.m4 \
aclocal.m4 config.guess config.sub configure configure.ac \
curl-config.in depcomp install-sh ltmain.sh missing \
mkinstalldirs
DIST_SUBDIRS = $(SUBDIRS)
# Default goal: build every subdirectory via the recursive `all' target.
all: all-recursive
.SUFFIXES:
am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \
	configure.lineno
# Autotools regeneration rules. The prerequisites are commented out
# (maintainer mode is disabled in this tree), so the tools only run
# when a target is requested explicitly.
$(srcdir)/Makefile.in: # Makefile.am $(top_srcdir)/configure.ac $(ACLOCAL_M4)
	cd $(top_srcdir) && \
	  $(AUTOMAKE) --foreign Makefile
Makefile: # $(srcdir)/Makefile.in $(top_builddir)/config.status
	cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe)
$(top_builddir)/config.status: $(srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
	$(SHELL) ./config.status --recheck
$(srcdir)/configure: # $(srcdir)/configure.ac $(ACLOCAL_M4) $(CONFIGURE_DEPENDENCIES)
	cd $(srcdir) && $(AUTOCONF)
$(ACLOCAL_M4): # configure.ac acinclude.m4
	cd $(srcdir) && $(ACLOCAL) $(ACLOCAL_AMFLAGS)
# Regenerate the curl-config script from curl-config.in via config.status.
curl-config: $(top_builddir)/config.status curl-config.in
	cd $(top_builddir) && $(SHELL) ./config.status $@
binSCRIPT_INSTALL = $(INSTALL_SCRIPT)
# Install each script in $(bin_SCRIPTS) into $(bindir), applying the
# configured program-name $(transform) (e.g. a prefix/suffix) to the
# installed name. Looks for the script in the build dir first, then in
# $(srcdir) (VPATH builds).
install-binSCRIPTS: $(bin_SCRIPTS)
	@$(NORMAL_INSTALL)
	$(mkinstalldirs) $(DESTDIR)$(bindir)
	@list='$(bin_SCRIPTS)'; for p in $$list; do \
	  if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
	  if test -f $$d$$p; then \
	    f=`echo "$$p" | sed 's|^.*/||;$(transform)'`; \
	    echo " $(binSCRIPT_INSTALL) $$d$$p $(DESTDIR)$(bindir)/$$f"; \
	    $(binSCRIPT_INSTALL) $$d$$p $(DESTDIR)$(bindir)/$$f; \
	  else :; fi; \
	done
# Remove the installed scripts, applying the same name transform so the
# names match what install-binSCRIPTS created.
uninstall-binSCRIPTS:
	@$(NORMAL_UNINSTALL)
	@list='$(bin_SCRIPTS)'; for p in $$list; do \
	  f=`echo "$$p" | sed 's|^.*/||;$(transform)'`; \
	  echo " rm -f $(DESTDIR)$(bindir)/$$f"; \
	  rm -f $(DESTDIR)$(bindir)/$$f; \
	done
# libtool housekeeping: remove libtool objects, the .libs/_libs scratch
# directories, and (on distclean) the generated libtool script itself.
# The leading `-' ignores errors when the files are already gone.
mostlyclean-libtool:
	-rm -f *.lo
clean-libtool:
	-rm -rf .libs _libs
distclean-libtool:
	-rm -f libtool
# Empty automake hook: this package installs no info documentation.
uninstall-info-am:
# This directory's subdirectories are mostly independent; you can cd
# into them and run `make' without going through this Makefile.
# To change the values of `make' variables: instead of editing Makefiles,
# (1) if the variable is set in `config.status', edit `config.status'
# (which will cause the Makefiles to be regenerated when you run `make');
# (2) otherwise, pass the desired values on the `make' command line.
# Generic recursion driver: for a goal like `all-recursive', strip the
# `-recursive' suffix and run that goal in every $(SUBDIRS) entry; `.'
# is handled last via the `-am' form. The fnord/$$2 trick extracts the
# first word of MAKEFLAGS to detect -k (keep-going) mode.
$(RECURSIVE_TARGETS):
	@set fnord $$MAKEFLAGS; amf=$$2; \
	dot_seen=no; \
	target=`echo $@ | sed s/-recursive//`; \
	list='$(SUBDIRS)'; for subdir in $$list; do \
	  echo "Making $$target in $$subdir"; \
	  if test "$$subdir" = "."; then \
	    dot_seen=yes; \
	    local_target="$$target-am"; \
	  else \
	    local_target="$$target"; \
	  fi; \
	  (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \
	  || case "$$amf" in *=*) exit 1;; *k*) fail=yes;; *) exit 1;; esac; \
	done; \
	if test "$$dot_seen" = "no"; then \
	  $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \
	fi; test -z "$$fail"
mostlyclean-recursive clean-recursive distclean-recursive \
maintainer-clean-recursive:
@set fnord $$MAKEFLAGS; amf=$$2; \
dot_seen=no; \
case "$@" in \
distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \
*) list='$(SUBDIRS)' ;; \
esac; \
rev=''; for subdir in $$list; do \
if test "$$subdir" = "."; then :; else \
rev="$$subdir $$rev"; \
fi; \
done; \
rev="$$rev ."; \
target=`echo $@ | sed s/-recursive//`; \
for subdir in $$rev; do \
echo "Making $$target in $$subdir"; \
if test "$$subdir" = "."; then \
local_target="$$target-am"; \
else \
local_target="$$target"; \
fi; \
(cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \
|| case "$$amf" in *=*) exit 1;; *k*) fail=yes;; *) exit 1;; esac; \
done && test -z "$$fail"
# Run `tags' / `ctags' in every subdirectory except `.' itself
# (the top-level TAGS/CTAGS rules handle the current directory).
tags-recursive:
	list='$(SUBDIRS)'; for subdir in $$list; do \
	  test "$$subdir" = . || (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) tags); \
	done
ctags-recursive:
	list='$(SUBDIRS)'; for subdir in $$list; do \
	  test "$$subdir" = . || (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) ctags); \
	done
ETAGS = etags
ETAGSFLAGS =
CTAGS = ctags
CTAGSFLAGS =
tags: TAGS
ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES)
list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
unique=`for i in $$list; do \
if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
done | \
$(AWK) ' { files[$$0] = 1; } \
END { for (i in files) print i; }'`; \
mkid -fID $$unique
TAGS: tags-recursive $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \
$(TAGS_FILES) $(LISP)
tags=; \
here=`pwd`; \
list='$(SUBDIRS)'; for subdir in $$list; do \
if test "$$subdir" = .; then :; else \
test -f $$subdir/TAGS && tags="$$tags -i $$here/$$subdir/TAGS"; \
fi; \
done; \
list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
unique=`for i in $$list; do \
if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
done | \
$(AWK) ' { files[$$0] = 1; } \
END { for (i in files) print i; }'`; \
test -z "$(ETAGS_ARGS)$$tags$$unique" \
|| $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
$$tags $$unique
ctags: CTAGS
CTAGS: ctags-recursive $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \
$(TAGS_FILES) $(LISP)
tags=; \
here=`pwd`; \
list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
unique=`for i in $$list; do \
if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
done | \
$(AWK) ' { files[$$0] = 1; } \
END { for (i in files) print i; }'`; \
test -z "$(CTAGS_ARGS)$$tags$$unique" \
|| $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
$$tags $$unique
GTAGS:
here=`$(am__cd) $(top_builddir) && pwd` \
&& cd $(top_srcdir) \
&& gtags -i $(GTAGS_ARGS) $$here
distclean-tags:
-rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
top_distdir = .
distdir = $(PACKAGE)-$(VERSION)
am__remove_distdir = \
{ test ! -d $(distdir) \
|| { find $(distdir) -type d ! -perm -200 -exec chmod u+w {} ';' \
&& rm -fr $(distdir); }; }
GZIP_ENV = --best
distuninstallcheck_listfiles = find . -type f -print
distcleancheck_listfiles = find . -type f -print
distdir: $(DISTFILES)
$(am__remove_distdir)
mkdir $(distdir)
$(mkinstalldirs) $(distdir)/. $(distdir)/packages/EPM $(distdir)/packages/Linux/RPM
@srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; \
topsrcdirstrip=`echo "$(top_srcdir)" | sed 's|.|.|g'`; \
list='$(DISTFILES)'; for file in $$list; do \
case $$file in \
$(srcdir)/*) file=`echo "$$file" | sed "s|^$$srcdirstrip/||"`;; \
$(top_srcdir)/*) file=`echo "$$file" | sed "s|^$$topsrcdirstrip/|$(top_builddir)/|"`;; \
esac; \
if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
dir=`echo "$$file" | sed -e 's,/[^/]*$$,,'`; \
if test "$$dir" != "$$file" && test "$$dir" != "."; then \
dir="/$$dir"; \
$(mkinstalldirs) "$(distdir)$$dir"; \
else \
dir=''; \
fi; \
if test -d $$d/$$file; then \
if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \
fi; \
cp -pR $$d/$$file $(distdir)$$dir || exit 1; \
else \
test -f $(distdir)/$$file \
|| cp -p $$d/$$file $(distdir)/$$file \
|| exit 1; \
fi; \
done
list='$(SUBDIRS)'; for subdir in $$list; do \
if test "$$subdir" = .; then :; else \
test -d $(distdir)/$$subdir \
|| mkdir $(distdir)/$$subdir \
|| exit 1; \
(cd $$subdir && \
$(MAKE) $(AM_MAKEFLAGS) \
top_distdir="$(top_distdir)" \
distdir=../$(distdir)/$$subdir \
distdir) \
|| exit 1; \
fi; \
done
$(MAKE) $(AM_MAKEFLAGS) \
top_distdir="$(top_distdir)" distdir="$(distdir)" \
dist-hook
-find $(distdir) -type d ! -perm -777 -exec chmod a+rwx {} \; -o \
! -type d ! -perm -444 -links 1 -exec chmod a+r {} \; -o \
! -type d ! -perm -400 -exec chmod a+r {} \; -o \
! -type d ! -perm -444 -exec $(SHELL) $(install_sh) -c -m a+r {} {} \; \
|| chmod -R a+r $(distdir)
dist-gzip: distdir
$(AMTAR) chof - $(distdir) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz
$(am__remove_distdir)
dist dist-all: distdir
$(AMTAR) chof - $(distdir) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz
$(am__remove_distdir)
# This target untars the dist file and tries a VPATH configuration. Then
# it guarantees that the distribution is self-contained by making another
# tarfile.
distcheck: dist
$(am__remove_distdir)
GZIP=$(GZIP_ENV) gunzip -c $(distdir).tar.gz | $(AMTAR) xf -
chmod -R a-w $(distdir); chmod a+w $(distdir)
mkdir $(distdir)/=build
mkdir $(distdir)/=inst
chmod a-w $(distdir)
dc_install_base=`$(am__cd) $(distdir)/=inst && pwd` \
&& dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \
&& cd $(distdir)/=build \
&& ../configure --srcdir=.. --prefix="$$dc_install_base" \
$(DISTCHECK_CONFIGURE_FLAGS) \
&& $(MAKE) $(AM_MAKEFLAGS) \
&& $(MAKE) $(AM_MAKEFLAGS) dvi \
&& $(MAKE) $(AM_MAKEFLAGS) check \
&& $(MAKE) $(AM_MAKEFLAGS) install \
&& $(MAKE) $(AM_MAKEFLAGS) installcheck \
&& $(MAKE) $(AM_MAKEFLAGS) uninstall \
&& $(MAKE) $(AM_MAKEFLAGS) distuninstallcheck_dir="$$dc_install_base" \
distuninstallcheck \
&& chmod -R a-w "$$dc_install_base" \
&& ({ \
(cd ../.. && $(mkinstalldirs) "$$dc_destdir") \
&& $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" install \
&& $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" uninstall \
&& $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" \
distuninstallcheck_dir="$$dc_destdir" distuninstallcheck; \
} || { rm -rf "$$dc_destdir"; exit 1; }) \
&& rm -rf "$$dc_destdir" \
&& $(MAKE) $(AM_MAKEFLAGS) dist-gzip \
&& rm -f $(distdir).tar.gz \
&& $(MAKE) $(AM_MAKEFLAGS) distcleancheck
$(am__remove_distdir)
@echo "$(distdir).tar.gz is ready for distribution" | \
sed 'h;s/./=/g;p;x;p;x'
distuninstallcheck:
cd $(distuninstallcheck_dir) \
&& test `$(distuninstallcheck_listfiles) | wc -l` -le 1 \
|| { echo "ERROR: files left after uninstall:" ; \
if test -n "$(DESTDIR)"; then \
echo " (check DESTDIR support)"; \
fi ; \
$(distuninstallcheck_listfiles) ; \
exit 1; } >&2
distcleancheck: distclean
if test '$(srcdir)' = . ; then \
echo "ERROR: distcleancheck can only run from a VPATH build" ; \
exit 1 ; \
fi
test `$(distcleancheck_listfiles) | wc -l` -eq 0 \
|| { echo "ERROR: files left in build directory after distclean:" ; \
$(distcleancheck_listfiles) ; \
exit 1; } >&2
check-am: all-am
check: check-recursive
all-am: Makefile $(SCRIPTS)
installdirs: installdirs-recursive
installdirs-am:
$(mkinstalldirs) $(DESTDIR)$(bindir)
install: install-recursive
install-exec: install-exec-recursive
install-data: install-data-recursive
uninstall: uninstall-recursive
install-am: all-am
@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
installcheck: installcheck-recursive
install-strip:
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
INSTALL_STRIP_FLAG=-s \
`test -z '$(STRIP)' || \
echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install
mostlyclean-generic:
clean-generic:
distclean-generic:
-rm -f Makefile $(CONFIG_CLEAN_FILES)
maintainer-clean-generic:
@echo "This command is intended for maintainers to use"
@echo "it deletes files that may require special tools to rebuild."
clean: clean-recursive
clean-am: clean-generic clean-libtool mostlyclean-am
distclean: distclean-recursive
-rm -f $(am__CONFIG_DISTCLEAN_FILES)
distclean-am: clean-am distclean-generic distclean-libtool \
distclean-tags
dvi: dvi-recursive
dvi-am:
info: info-recursive
info-am:
install-data-am:
install-exec-am: install-binSCRIPTS
install-info: install-info-recursive
install-man:
installcheck-am:
maintainer-clean: maintainer-clean-recursive
-rm -f $(am__CONFIG_DISTCLEAN_FILES)
-rm -rf autom4te.cache
maintainer-clean-am: distclean-am maintainer-clean-generic
mostlyclean: mostlyclean-recursive
mostlyclean-am: mostlyclean-generic mostlyclean-libtool
pdf: pdf-recursive
pdf-am:
ps: ps-recursive
ps-am:
uninstall-am: uninstall-binSCRIPTS uninstall-info-am
uninstall-info: uninstall-info-recursive
.PHONY: $(RECURSIVE_TARGETS) CTAGS GTAGS all all-am check check-am clean \
clean-generic clean-libtool clean-recursive ctags \
ctags-recursive dist dist-all dist-gzip distcheck distclean \
distclean-generic distclean-libtool distclean-recursive \
distclean-tags distcleancheck distdir distuninstallcheck dvi \
dvi-am dvi-recursive info info-am info-recursive install \
install-am install-binSCRIPTS install-data install-data-am \
install-data-recursive install-exec install-exec-am \
install-exec-recursive install-info install-info-am \
install-info-recursive install-man install-recursive \
install-strip installcheck installcheck-am installdirs \
installdirs-am installdirs-recursive maintainer-clean \
maintainer-clean-generic maintainer-clean-recursive mostlyclean \
mostlyclean-generic mostlyclean-libtool mostlyclean-recursive \
pdf pdf-am pdf-recursive ps ps-am ps-recursive tags \
tags-recursive uninstall uninstall-am uninstall-binSCRIPTS \
uninstall-info-am uninstall-info-recursive uninstall-recursive
# create a root makefile in the distribution:
# dist-hook runs after `distdir' has populated the staging tree: prune
# the test logs from what gets packaged, and ship the hand-written
# Makefile.dist as the tarball's top-level Makefile.
dist-hook:
	rm -rf $(top_builddir)/tests/log
	cp $(srcdir)/Makefile.dist $(distdir)/Makefile
# Build the HTML documentation in docs/.
# `&&' (not `;') so a failed cd cannot run the build in the wrong
# directory; $(MAKE) (not bare `make') so -n/-j and the jobserver
# propagate to the sub-make.
html:
	cd docs && $(MAKE) html
# Build the PDF documentation in docs/.
# `&&' guards against a failed cd; $(MAKE) propagates flags and the
# jobserver to the sub-make (a literal `make' would not).
pdf:
	cd docs && $(MAKE) pdf
# `check' is the conventional GNU target name; route it to the curl
# test driver below.
check: test

# Run the test suite: quiet by default, full output via test-full.
# `&&' (not `;') so a failed cd cannot run make in the wrong directory;
# $(MAKE) keeps -j/-n and the jobserver working in the sub-make.
test:
	@(cd tests && $(MAKE) quiet-test)
test-full:
	@(cd tests && $(MAKE) full-test)
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the MPL or the MIT/X-derivate
# licenses. You may pick one of these licenses.
# Build source and binary rpms. For rpm-3.0 and above, the ~/.rpmmacros
# must contain the following line:
# %_topdir /home/loic/local/rpm
# and that /home/loic/local/rpm contains the directory SOURCES, BUILD etc.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
# cd /home/loic/local/rpm ; mkdir -p SOURCES BUILD RPMS/i386 SPECS SRPMS
#
# If additional configure flags are needed to build the package, add the
# following in ~/.rpmmacros
# %configure CFLAGS="%{optflags}" ./configure %{_target_platform} --prefix=%{_prefix} ${AM_CONFIGFLAGS}
# and run make rpm in the following way:
# AM_CONFIGFLAGS='--with-uri=/home/users/loic/local/RedHat-6.2' make rpm
#
# $Id: Makefile.dist,v 1.12 2002/09/07 21:44:02 hartroth Exp $
#############################################################################
# Bootstrap build: run configure with defaults, then build everything.
# $(MAKE) instead of a literal `make' so command-line flags and the
# -j jobserver are inherited by the sub-make.
all:
	./configure
	$(MAKE)
# Build both the plain and the SSL-enabled packages via the `rpm'
# target, selecting the spec file through RPMDIST.
rpms:
	$(MAKE) RPMDIST=curl rpm
	$(MAKE) RPMDIST=curl-ssl rpm
# Configure with OpenSSL support enabled, then build.
# $(MAKE) instead of literal `make' so flags/jobserver propagate.
ssl:
	./configure --with-ssl
	$(MAKE)
# Build source and binary rpms for $(RPMDIST) (see the `rpms' target).
# One shell invocation (lines joined with `; \'): find the rpm _topdir
# from `rpm --showrc', copy the spec file and the release tarball there,
# build, then move the resulting packages back into this directory.
rpm:
	RPM_TOPDIR=`rpm --showrc | $(PERL) -n -e 'print if(s/.*_topdir\s+(.*)/$$1/)'` ; \
	cp $(srcdir)/packages/Linux/RPM/$(RPMDIST).spec $$RPM_TOPDIR/SPECS ; \
	cp $(PACKAGE)-$(VERSION).tar.gz $$RPM_TOPDIR/SOURCES ; \
	rpm -ba --clean --rmsource $$RPM_TOPDIR/SPECS/$(RPMDIST).spec ; \
	mv $$RPM_TOPDIR/RPMS/i386/$(RPMDIST)-*.rpm . ; \
	mv $$RPM_TOPDIR/SRPMS/$(RPMDIST)-*.src.rpm .
# Build with Borland C++ (run from a Windows shell).
# `&&' is understood by both cmd.exe and a POSIX sh; the original
# single `&' would background the cd under sh and run make in the
# wrong directory. $(MAKE) propagates flags to the sub-make.
borland:
	cd lib && $(MAKE) -f Makefile.b32
	cd src && $(MAKE) -f Makefile.b32
#
# Build a Solaris pkgadd format file
# run 'make pkgadd' once you've done './configure' and 'make' to make a Solaris pkgadd format
# file (which ends up back in this directory).
# The pkgadd file is in 'pkgtrans' format, so to install on Solaris, do
# pkgadd -d ./HAXXcurl-*
#
# Build with MinGW (zlib enabled).
# `&&' works in both cmd.exe and sh; a single `&' would background the
# cd under a POSIX shell. $(MAKE) propagates flags to the sub-make.
mingw32:
	cd lib && $(MAKE) -f Makefile.m32 ZLIB=1
	cd src && $(MAKE) -f Makefile.m32 ZLIB=1
# gak - libtool requires an absolute directory, hence the pwd below...
# Build a Solaris pkgadd package: install into a scratch DESTDIR under
# packages/Solaris/root, generate the copyright file from COPYING, then
# let packages/Solaris assemble the pkgtrans-format file.
# $(MAKE) (not literal `make') so flags and the jobserver propagate;
# libtool needs an absolute DESTDIR, hence the `/bin/pwd`.
pkgadd:
	umask 022 ; \
	$(MAKE) install DESTDIR=`/bin/pwd`/packages/Solaris/root ; \
	cat COPYING > $(srcdir)/packages/Solaris/copyright ; \
	cd $(srcdir)/packages/Solaris && $(MAKE) package
# Build with MinGW, SSL and zlib enabled.
# Same fixes as `mingw32': `&&' instead of a backgrounding `&', and
# $(MAKE) instead of a literal `make'.
mingw32-ssl:
	cd lib && $(MAKE) -f Makefile.m32 SSL=1 ZLIB=1
	cd src && $(MAKE) -f Makefile.m32 SSL=1 ZLIB=1
# Build with MSVC's nmake (release config).
# Each recipe line runs in its own shell, so the original bare `cd lib'
# never affected the following nmake line; chain cd and nmake on one
# line so nmake actually runs in the right directory.
vc:
	cd lib && nmake -f Makefile.vc6 cfg=release
	cd src && nmake -f Makefile.vc6
# Build with MSVC's nmake, statically linked SSL config.
# cd and nmake are chained on one line because each recipe line runs in
# its own shell (a bare `cd' would not carry over to the next line).
vc-ssl:
	cd lib && nmake -f Makefile.vc6 cfg=release-ssl
	cd src && nmake -f Makefile.vc6 cfg=release-ssl
# Build with MSVC's nmake, SSL as a DLL.
# cd and nmake are chained on one line because each recipe line runs in
# its own shell (a bare `cd' would not carry over to the next line).
vc-ssl-dll:
	cd lib && nmake -f Makefile.vc6 cfg=release-ssl-dll
	cd src && nmake -f Makefile.vc6
# Cygwin uses the normal autoconf build, optionally with OpenSSL.
# $(MAKE) instead of literal `make' so flags/jobserver propagate.
cygwin:
	./configure
	$(MAKE)
cygwin-ssl:
	./configure --with-ssl
	$(MAKE)
# Convenience aliases: plain and SSL-enabled builds on unixy systems.
unix: all
unix-ssl: ssl
linux: all
linux-ssl: ssl
#
# Build a cygwin binary tarball installation file
# resulting .tar.bz2 file will end up at packages/Win32/cygwin
# ($(MAKE) -C changes into the packaging directory before building.)
cygwinbin:
	$(MAKE) -C packages/Win32/cygwin cygwinbin
# Tell versions [3.59,3.63) of GNU make to not export all variables.
# Otherwise a system limit (for SysV at least) may be exceeded.
.NOEXPORT:

View file

@ -2,9 +2,7 @@
# Configuration validation subroutine script.
# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
# 2000, 2001, 2002 Free Software Foundation, Inc.
timestamp='2002-11-30'
# This file is (in principle) common to ALL GNU software.
# The presence of a machine in this file suggests that SOME GNU software
# can handle that machine. It does not imply ALL GNU software can.
@ -28,7 +26,6 @@ timestamp='2002-11-30'
# distribute this file as part of a program that contains a
# configuration script generated by Autoconf, you may include it under
# the same distribution terms that you use for the rest of that program.
# Please send patches to <config-patches@gnu.org>. Submit a context
# diff and a properly formatted ChangeLog entry.
#
@ -36,7 +33,6 @@ timestamp='2002-11-30'
# Supply the specified configuration type as an argument.
# If it is invalid, we print an error message on stderr and exit with code 1.
# Otherwise, we print the canonical config type on stdout and succeed.
# This file is supposed to be the same for all GNU packages
# and recognize all the CPU types, system types and aliases
# that are meaningful with *any* GNU software.
@ -44,16 +40,13 @@ timestamp='2002-11-30'
# it does not support. The user should be able to distinguish
# a failure to support a valid configuration from a meaningless
# configuration.
# The goal of this file is to map all the various variations of a given
# machine specification into a single specification in the form:
# CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM
# or in some cases, the newer four-part form:
# CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM
# It is wrong to echo any other type of specification.
me=`echo "$0" | sed -e 's,.*/,,'`
usage="\
Usage: $0 [OPTION] CPU-MFR-OPSYS
$0 [OPTION] ALIAS

View file

@ -1,4 +1,5 @@
#! /bin/sh
#!/bin/bash
echo 1
# Common stub for a few missing GNU programs while installing.
# Copyright 1996, 1997, 1999, 2000 Free Software Foundation, Inc.
# Originally by Fran,cois Pinard <pinard@iro.umontreal.ca>, 1996.

View file

@ -1426,9 +1426,9 @@ puts (
"\n"
" Get a file from a HTTP server that requires user and password, using the\n"
" same proxy as above:\n"
"\n"
);
puts(
" \n"
" curl -u user:passwd -x my-proxy:888 http://www.get.this/\n"
"\n"
" Some proxies require special authentication. Specify by using -U as above:\n"
@ -1467,10 +1467,10 @@ puts (
"\n"
" Upload all data on stdin to a specified ftp site:\n"
"\n"
);
puts(
" curl -T - ftp://ftp.upload.com/myfile\n"
"\n"
);
puts(
" Upload data from a specified file, login with user and password:\n"
"\n"
" curl -T uploadfile -u user:passwd ftp://ftp.upload.com/myfile\n"
@ -1484,9 +1484,9 @@ puts (
"\n"
" curl -T localfile -a ftp://ftp.upload.com/remotefile\n"
"\n"
" Curl also supports ftp upload through a proxy, but only if the proxy is\n"
);
puts(
" Curl also supports ftp upload through a proxy, but only if the proxy is \n"
" configured to allow that kind of tunneling. If it does, you can run curl in\n"
" a fashion similar to:\n"
"\n"
@ -1501,12 +1501,12 @@ puts (
" Note that the http server must've been configured to accept PUT before this\n"
" can be done successfully.\n"
"\n"
);
puts(
" For other ways to do http data upload, see the POST section below.\n"
"\n"
"VERBOSE / DEBUG\n"
"\n"
);
puts(
" If curl fails where it isn't supposed to, if the servers don't let you in,\n"
" if you can't understand the responses: use the -v flag to get verbose\n"
" fetching. Curl will output lots of info and what it sends and receives in\n"
@ -1515,9 +1515,9 @@ puts (
"\n"
" curl -v ftp://ftp.upload.com/\n"
"\n"
" To get even more details and information on what curl does, try using the\n"
);
puts(
" To get even more details and information on what curl does, try using the \n"
" --trace or --trace-ascii options with a given file name to log to, like\n"
" this:\n"
"\n"
@ -1529,11 +1529,11 @@ puts (
" Different protocols provide different ways of getting detailed information\n"
" about specific files/documents. To get curl to show detailed information\n"
" about a single file, you should use -I/--head option. It displays all\n"
);
puts(
" available info on a single file for HTTP and FTP. The HTTP information is a\n"
" lot more extensive.\n"
"\n"
);
puts(
" For HTTP, you can get the header information (the same as -I would show)\n"
" shown before the data by using -i/--include. Curl understands the\n"
" -D/--dump-header option when getting files from both FTP and HTTP, and it\n"
@ -1541,11 +1541,11 @@ puts (
"\n"
" Store the HTTP headers in a separate file (headers.txt in the example):\n"
"\n"
);
puts(
" curl --dump-header headers.txt curl.haxx.se\n"
"\n"
" Note that headers stored in a separate file can be very useful at a later\n"
);
puts(
" time if you want curl to use cookies sent by the server. More about that in\n"
" the cookies section.\n"
"\n"
@ -1557,12 +1557,12 @@ puts (
" Post a simple \"name\" and \"phone\" guestbook.\n"
"\n"
" curl -d \"name=Rafael%20Sagula&phone=3320780\" \\\n"
);
puts(
" http://www.where.com/guest.cgi\n"
"\n"
" How to post a form with curl, lesson #1:\n"
"\n"
);
puts(
" Dig out all the <input> tags in the form that you want to fill in. (There's\n"
" a perl program called formfind.pl on the curl site that helps with this).\n"
"\n"
@ -1572,9 +1572,9 @@ puts (
" <variable1>=<data1>&<variable2>=<data2>&...\n"
"\n"
" The 'variable' names are the names set with \"name=\" in the <input> tags, and\n"
" the data is the contents you want to fill in for the inputs. The data *must*\n"
);
puts(
" the data is the contents you want to fill in for the inputs. The data *must* \n"
" be properly URL encoded. That means you replace space with + and that you\n"
" write weird letters with %XX where XX is the hexadecimal representation of\n"
" the letter's ASCII code.\n"
@ -1586,12 +1586,12 @@ puts (
" <form action=\"post.cgi\" method=\"post\">\n"
" <input name=user size=10>\n"
" <input name=pass type=password size=10>\n"
);
puts(
" <input name=id type=hidden value=\"blablabla\">\n"
" <input name=ding value=\"submit\">\n"
" </form>\n"
"\n"
);
puts(
" We want to enter user 'foobar' with password '12345'.\n"
"\n"
" To post to this, you enter a curl command line like:\n"
@ -1601,41 +1601,41 @@ puts (
"\n"
"\n"
" While -d uses the application/x-www-form-urlencoded mime-type, generally\n"
);
puts(
" understood by CGI's and similar, curl also supports the more capable\n"
" multipart/form-data type. This latter type supports things like file upload.\n"
"\n"
);
puts(
" -F accepts parameters like -F \"name=contents\". If you want the contents to\n"
" be read from a file, use <@filename> as contents. When specifying a file,\n"
" you can also specify the file content type by appending ';type=<mime type>'\n"
" to the file name. You can also post the contents of several files in one\n"
);
puts(
" field. For example, the field name 'coolfiles' is used to send three files,\n"
" with different content types using the following syntax:\n"
"\n"
);
puts(
" curl -F \"coolfiles=@fil1.gif;type=image/gif,fil2.txt,fil3.html\" \\\n"
" http://www.post.com/postit.cgi\n"
"\n"
" If the content-type is not specified, curl will try to guess from the file\n"
" extension (it only knows a few), or use the previously specified type (from\n"
" an earlier file if several files are specified in a list) or else it will\n"
);
puts(
" using the default type 'text/plain'.\n"
"\n"
" Emulate a fill-in form with -F. Let's say you fill in three fields in a\n"
);
puts(
" form. One field is a file name which to post, one field is your name and one\n"
" field is a file description. We want to post the file we have written named\n"
" \"cooltext.txt\". To let curl do the posting of this data instead of your\n"
" favourite browser, you have to read the HTML source of the form page and\n"
);
puts(
" find the names of the input fields. In our example, the input field names\n"
" are 'file', 'yourname' and 'filedescription'.\n"
"\n"
" curl -F \"file=@cooltext.txt\" -F \"yourname=Daniel\" \\\n"
);
puts(
" -F \"filedescription=Cool text file with cool text inside\" \\\n"
" http://www.post.com/postit.cgi\n"
"\n"
@ -1645,8 +1645,6 @@ puts (
" \n"
" curl -F \"pictures=@dog.gif,cat.gif\" \n"
" \n"
);
puts(
" 2. Send two fields with two field names: \n"
"\n"
" curl -F \"docpicture=@dog.gif\" -F \"catpicture=@cat.gif\" \n"
@ -1654,13 +1652,13 @@ puts (
"REFERRER\n"
"\n"
" A HTTP request has the option to include information about which address\n"
);
puts(
" that referred to actual page. Curl allows you to specify the\n"
" referrer to be used on the command line. It is especially useful to\n"
" fool or trick stupid servers or CGI scripts that rely on that information\n"
" being available or contain certain data.\n"
"\n"
);
puts(
" curl -e www.coolsite.com http://www.showme.com/\n"
"\n"
" NOTE: The referer field is defined in the HTTP spec to be a full URL.\n"
@ -1668,6 +1666,8 @@ puts (
"USER AGENT\n"
"\n"
" A HTTP request has the option to include information about the browser\n"
);
puts(
" that generated the request. Curl allows it to be specified on the command\n"
" line. It is especially useful to fool or trick stupid servers or CGI\n"
" scripts that only accept certain browsers.\n"
@ -1676,18 +1676,16 @@ puts (
"\n"
" curl -A 'Mozilla/3.0 (Win95; I)' http://www.nationsbank.com/\n"
"\n"
);
puts(
" Other common strings:\n"
" 'Mozilla/3.0 (Win95; I)' Netscape Version 3 for Windows 95\n"
" 'Mozilla/3.04 (Win95; U)' Netscape Version 3 for Windows 95\n"
" 'Mozilla/2.02 (OS/2; U)' Netscape Version 2 for OS/2\n"
);
puts(
" 'Mozilla/4.04 [en] (X11; U; AIX 4.2; Nav)' NS for AIX\n"
" 'Mozilla/4.05 [en] (X11; U; Linux 2.0.32 i586)' NS for Linux\n"
"\n"
" Note that Internet Explorer tries hard to be compatible in every way:\n"
);
puts(
" 'Mozilla/4.0 (compatible; MSIE 4.01; Windows 95)' MSIE for W95\n"
"\n"
" Mozilla is not the only possible User-Agent name:\n"
@ -1696,21 +1694,21 @@ puts (
"\n"
"COOKIES\n"
"\n"
);
puts(
" Cookies are generally used by web servers to keep state information at the\n"
" client's side. The server sets cookies by sending a response line in the\n"
" headers that looks like 'Set-Cookie: <data>' where the data part then\n"
);
puts(
" typically contains a set of NAME=VALUE pairs (separated by semicolons ';'\n"
" like \"NAME1=VALUE1; NAME2=VALUE2;\"). The server can also specify for what\n"
" path the \"cookie\" should be used for (by specifying \"path=value\"), when the\n"
);
puts(
" cookie should expire (\"expire=DATE\"), for what domain to use it\n"
" (\"domain=NAME\") and if it should be used on secure connections only\n"
" (\"secure\").\n"
"\n"
" If you've received a page from a server that contains a header like:\n"
);
puts(
" Set-Cookie: sessionid=boo123; path=\"/foo\";\n"
"\n"
" it means the server wants that first pair passed on when we get anything in\n"
@ -1720,12 +1718,12 @@ puts (
"\n"
" curl -b \"name=Daniel\" www.sillypage.com\n"
"\n"
);
puts(
" Curl also has the ability to use previously received cookies in following\n"
" sessions. If you get cookies from a server and store them in a file in a\n"
" manner similar to:\n"
"\n"
);
puts(
" curl --dump-header headers www.example.com\n"
"\n"
" ... you can then in a second connect to that (or another) site, use the\n"
@ -1734,12 +1732,12 @@ puts (
" curl -b headers www.example.com\n"
"\n"
" While saving headers to a file is a working way to store cookies, it is\n"
);
puts(
" however error-prone and not the prefered way to do this. Instead, make curl\n"
" save the incoming cookies using the well-known netscape cookie format like\n"
" this:\n"
"\n"
);
puts(
" curl -c cookies.txt www.example.com\n"
"\n"
" Note that by specifying -b you enable the \"cookie awareness\" and with -L\n"
@ -1747,22 +1745,22 @@ puts (
" with cookies). So that if a site sends cookies and a location, you can\n"
" use a non-existing file to trigger the cookie awareness like:\n"
"\n"
);
puts(
" curl -L -b empty.txt www.example.com\n"
"\n"
" The file to read cookies from must be formatted using plain HTTP headers OR\n"
);
puts(
" as netscape's cookie file. Curl will determine what kind it is based on the\n"
" file contents. In the above command, curl will parse the header and store\n"
" the cookies received from www.example.com. curl will send to the server the\n"
" stored cookies which match the request as it follows the location. The\n"
" file \"empty.txt\" may be a non-existant file.\n"
"\n"
);
puts(
" Alas, to both read and write cookies from a netscape cookie file, you can\n"
" set both -b and -c to use the same file:\n"
"\n"
);
puts(
" curl -b cookies.txt -c cookies.txt www.example.com\n"
"\n"
"PROGRESS METER\n"
@ -1771,61 +1769,61 @@ puts (
" happening. The different fields in the output have the following meaning:\n"
"\n"
" % Total % Received % Xferd Average Speed Time Curr.\n"
);
puts(
" Dload Upload Total Current Left Speed\n"
" 0 151M 0 38608 0 0 9406 0 4:41:43 0:00:04 4:41:39 9287\n"
"\n"
" From left-to-right:\n"
);
puts(
" % - percentage completed of the whole transfer\n"
" Total - total size of the whole expected transfer\n"
" % - percentage completed of the download\n"
" Received - currently downloaded amount of bytes\n"
" % - percentage completed of the upload\n"
);
puts(
" Xferd - currently uploaded amount of bytes\n"
" Average Speed\n"
" Dload - the average transfer speed of the download\n"
" Average Speed\n"
);
puts(
" Upload - the average transfer speed of the upload\n"
" Time Total - expected time to complete the operation\n"
" Time Current - time passed since the invoke\n"
" Time Left - expected time left to completetion\n"
" Curr.Speed - the average transfer speed the last 5 seconds (the first\n"
);
puts(
" 5 seconds of a transfer is based on less time of course.)\n"
"\n"
" The -# option will display a totally different progress bar that doesn't\n"
" need much explanation!\n"
"\n"
"SPEED LIMIT\n"
);
puts(
"\n"
" Curl allows the user to set the transfer speed conditions that must be met\n"
" to let the transfer keep going. By using the switch -y and -Y you\n"
" can make curl abort transfers if the transfer speed is below the specified\n"
" lowest limit for a specified time.\n"
"\n"
);
puts(
" To have curl abort the download if the speed is slower than 3000 bytes per\n"
" second for 1 minute, run:\n"
"\n"
" curl -Y 3000 -y 60 www.far-away-site.com\n"
"\n"
);
puts(
" This can very well be used in combination with the overall time limit, so\n"
" that the above operatioin must be completed in whole within 30 minutes:\n"
"\n"
" curl -m 1800 -Y 3000 -y 60 www.far-away-site.com\n"
"\n"
" Forcing curl not to transfer data faster than a given rate is also possible,\n"
);
puts(
" which might be useful if you're using a limited bandwidth connection and you\n"
" don't want your transfer to use all of it (sometimes referred to as\n"
" \"bandwith throttle\").\n"
"\n"
);
puts(
" Make curl transfer data no faster than 10 kilobytes per second:\n"
"\n"
" curl --limit-rate 10K www.far-away-site.com\n"
@ -1838,10 +1836,10 @@ puts (
"\n"
" curl -T upload --limit-rate 1M ftp://uploadshereplease.com\n"
"\n"
" When using the --limit-rate option, the transfer rate is regulated on a \n"
" per-second basis, which will cause the total transfer speed to become lower \n"
);
puts(
" When using the --limit-rate option, the transfer rate is regulated on a\n"
" per-second basis, which will cause the total transfer speed to become lower\n"
" than the given number. Sometimes of course substantially lower, if your\n"
" transfer stalls during periods.\n"
"\n"
@ -1851,10 +1849,10 @@ puts (
" systems) from the user's home dir on startup.\n"
"\n"
" The config file could be made up with normal command line switches, but you\n"
" can also specify the long options without the dashes to make it more \n"
" readable. You can separate the options and the parameter with spaces, or \n"
);
puts(
" can also specify the long options without the dashes to make it more\n"
" readable. You can separate the options and the parameter with spaces, or\n"
" with = or :. Comments can be used within the file. If the first letter on a\n"
" line is a '#'-letter the rest of the line is treated as a comment.\n"
"\n"
@ -1862,13 +1860,13 @@ puts (
" parameter within double quotes (\"). Within those quotes, you specify a\n"
" quote as \\\".\n"
"\n"
);
puts(
" NOTE: You must specify options and their arguments on the same line.\n"
"\n"
" Example, set default time out and proxy in a config file:\n"
"\n"
" # We want a 30 minute timeout:\n"
);
puts(
" -m 1800\n"
" # ... and we use a proxy for all accesses:\n"
" proxy = proxy.our.domain.com:8080\n"
@ -1877,13 +1875,13 @@ puts (
" leading up to the first characters of each line are ignored.\n"
"\n"
" Prevent curl from reading the default file by using -q as the first command\n"
);
puts(
" line parameter, like:\n"
"\n"
" curl -q www.thatsite.com\n"
"\n"
" Force curl to get and display a local help page in case it is invoked\n"
);
puts(
" without URL by making a config file similar to:\n"
"\n"
" # default url to get\n"
@ -1892,14 +1890,14 @@ puts (
" You can specify another config file to be read by using the -K/--config\n"
" flag. If you set config file name to \"-\" it'll read the config from stdin,\n"
" which can be handy if you want to hide options from being visible in process\n"
);
puts(
" tables etc:\n"
"\n"
" echo \"user = user:passwd\" | curl -K - http://that.secret.site.com\n"
"\n"
"EXTRA HEADERS\n"
"\n"
);
puts(
" When using curl in your own very special programs, you may end up needing\n"
" to pass on your own custom headers when getting a web page. You can do\n"
" this by using the -H flag.\n"
@ -1910,9 +1908,9 @@ puts (
" curl -H \"X-you-and-me: yes\" www.love.com\n"
"\n"
" This can also be useful in case you want curl to send a different text in a\n"
" header than it normally does. The -H header you specify then replaces the \n"
);
puts(
" header than it normally does. The -H header you specify then replaces the\n"
" header curl would normally send. If you replace an internal header with an\n"
" empty one, you prevent that header from being sent. To prevent the Host:\n"
" header from being used:\n"
@ -1925,10 +1923,10 @@ puts (
" relative the directory you enter. To get the file 'README' from your home\n"
" directory at your ftp site, do:\n"
"\n"
" curl ftp://user:passwd@my.site.com/README \n"
" \n"
);
puts(
" curl ftp://user:passwd@my.site.com/README\n"
"\n"
" But if you want the README file from the root directory of that very same\n"
" site, you need to specify the absolute file name:\n"
"\n"
@ -2067,9 +2065,9 @@ puts (
"\n"
" curl -C - -o file ftp://ftp.server.com/path/file\n"
"\n"
" Continue uploading a document(*1):\n"
);
puts(
" Continue uploading a document(*1): \n"
"\n"
" curl -C - -T file ftp://ftp.server.com/path/file\n"
"\n"
@ -2085,9 +2083,9 @@ puts (
"\n"
"TIME CONDITIONS\n"
"\n"
" HTTP allows a client to specify a time condition for the document it\n"
);
puts(
" HTTP allows a client to specify a time condition for the document it \n"
" requests. It is If-Modified-Since or If-Unmodified-Since. Curl allow you to\n"
" specify them with the -z/--time-cond flag.\n"
"\n"
@ -2097,10 +2095,10 @@ puts (
" curl -z local.html http://remote.server.com/remote.html\n"
"\n"
" Or you can download a file only if the local file is newer than the remote\n"
);
puts(
" one. Do this by prepending the date string with a '-', as in:\n"
"\n"
);
puts(
" curl -z -local.html http://remote.server.com/remote.html\n"
"\n"
" You can specify a \"free text\" date as condition. Tell curl to only download\n"
@ -2115,10 +2113,10 @@ puts (
"\n"
" For fun try\n"
"\n"
);
puts(
" curl dict://dict.org/m:curl\n"
" curl dict://dict.org/d:heisenbug:jargon\n"
);
puts(
" curl dict://dict.org/d:daniel:web1913\n"
"\n"
" Aliases for 'm' are 'match' and 'find', and aliases for 'd' are 'define'\n"
@ -2132,12 +2130,12 @@ puts (
" curl dict://dict.org/show:db\n"
" curl dict://dict.org/show:strat\n"
"\n"
);
puts(
" Authentication is still missing (but this is not required by the RFC)\n"
"\n"
"LDAP\n"
"\n"
);
puts(
" If you have installed the OpenLDAP library, curl can take advantage of it\n"
" and offer ldap:// support.\n"
"\n"
@ -2147,10 +2145,10 @@ puts (
"\n"
" Netscape's \"Netscape Directory SDK 3.0 for C Programmer's Guide Chapter 10:\n"
" Working with LDAP URLs\":\n"
);
puts(
" http://developer.netscape.com/docs/manuals/dirsdk/csdk30/url.htm\n"
"\n"
);
puts(
" RFC 2255, \"The LDAP URL Format\" http://www.rfc-editor.org/rfc/rfc2255.txt\n"
"\n"
" To show you an example, this is now I can get all people from my local LDAP\n"
@ -2162,11 +2160,11 @@ puts (
" (enforce ASCII) flag.\n"
"\n"
"ENVIRONMENT VARIABLES\n"
);
puts(
"\n"
" Curl reads and understands the following environment variables:\n"
"\n"
);
puts(
" http_proxy, HTTPS_PROXY, FTP_PROXY, GOPHER_PROXY\n"
"\n"
" They should be set for protocol-specific proxies. General proxy should be\n"
@ -2180,11 +2178,11 @@ puts (
" NO_PROXY\n"
"\n"
" If a tail substring of the domain-path for a host matches one of these\n"
);
puts(
" strings, transactions with that node will not be proxied.\n"
"\n"
"\n"
);
puts(
" The usage of the -x/--proxy flag overrides the environment variables.\n"
"\n"
"NETRC\n"
@ -2193,9 +2191,9 @@ puts (
" to specify name and password for commonly visited ftp sites in a file so\n"
" that you don't have to type them in each time you visit those sites. You\n"
" realize this is a big security risk if someone else gets hold of your\n"
" passwords, so therefor most unix programs won't read this file unless it is\n"
);
puts(
" passwords, so therefor most unix programs won't read this file unless it is \n"
" only readable by yourself (curl doesn't care though).\n"
"\n"
" Curl supports .netrc files if told so (using the -n/--netrc and\n"
@ -2208,9 +2206,9 @@ puts (
"\n"
"CUSTOM OUTPUT\n"
"\n"
" To better allow script programmers to get to know about the progress of\n"
);
puts(
" To better allow script programmers to get to know about the progress of \n"
" curl, the -w/--write-out option was introduced. Using this, you can specify\n"
" what information from the previous transfer you want to extract.\n"
"\n"
@ -2222,10 +2220,10 @@ puts (
"KERBEROS4 FTP TRANSFER\n"
"\n"
" Curl supports kerberos4 for FTP transfers. You need the kerberos package\n"
);
puts(
" installed and used at curl build time for it to be used.\n"
"\n"
);
puts(
" First, get the krb-ticket the normal way, like with the kauth tool. Then use\n"
" curl in way similar to:\n"
"\n"
@ -2237,9 +2235,9 @@ puts (
"TELNET\n"
"\n"
" The curl telnet support is basic and very easy to use. Curl passes all data\n"
" passed to it on stdin to the remote server. Connect to a remote telnet\n"
);
puts(
" passed to it on stdin to the remote server. Connect to a remote telnet \n"
" server using a command line similar to:\n"
"\n"
" curl telnet://remote.server.com\n"
@ -2251,10 +2249,10 @@ puts (
" for slow connections or similar.\n"
"\n"
" Pass options to the telnet protocol negotiation, by using the -t option. To\n"
);
puts(
" tell the server we use a vt100 terminal, try something like:\n"
"\n"
);
puts(
" curl -tTTYPE=vt100 telnet://remote.server.com\n"
"\n"
" Other interesting options for it -t include:\n"
@ -2265,37 +2263,37 @@ puts (
"\n"
" NOTE: the telnet protocol does not specify any way to login with a specified\n"
" user and password so curl can't do that automatically. To do that, you need\n"
);
puts(
" to track when the login prompt is received and send the username and\n"
" password accordingly.\n"
"\n"
"PERSISTANT CONNECTIONS\n"
"\n"
);
puts(
" Specifying multiple files on a single command line will make curl transfer\n"
" all of them, one after the other in the specified order.\n"
"\n"
" libcurl will attempt to use persistant connections for the transfers so that\n"
" the second transfer to the same host can use the same connection that was\n"
" already initiated and was left open in the previous transfer. This greatly\n"
);
puts(
" decreases connection time for all but the first transfer and it makes a far\n"
" better use of the network.\n"
"\n"
);
puts(
" Note that curl cannot use persistant connections for transfers that are used\n"
" in subsequence curl invokes. Try to stuff as many URLs as possible on the\n"
" same command line if they are using the same host, as that'll make the\n"
" transfers faster. If you use a http proxy for file transfers, practicly\n"
" all transfers will be persistant.\n"
"\n"
);
puts(
" Persistant connections were introduced in curl 7.7.\n"
"\n"
"MAILING LISTS\n"
"\n"
" For your convenience, we have several open mailing lists to discuss curl,\n"
);
puts(
" its development and things relevant to this. Get all info at\n"
" http://curl.haxx.se/mail/. The lists available are:\n"
"\n"
@ -2307,8 +2305,6 @@ puts (
"\n"
" curl-library\n"
"\n"
);
puts(
" Developers using or developing libcurl. Bugs, extensions, improvements.\n"
"\n"
" curl-announce\n"
@ -2317,6 +2313,8 @@ puts (
"\n"
" curl-and-PHP\n"
"\n"
);
puts(
" Using the curl functions in PHP. Everything curl with a PHP angle. Or PHP\n"
" with a curl angle.\n"
"\n"
@ -2324,14 +2322,14 @@ puts (
"\n"
" Receives notifications on all CVS commits done to the curl source module.\n"
" This can become quite a large amount of mails during intense development,\n"
);
puts(
" be aware. This is for us who like email...\n"
"\n"
" curl-www-commits\n"
"\n"
" Receives notifications on all CVS commits done to the curl www module\n"
" (basicly the web site). This can become quite a large amount of mails\n"
);
puts(
" during intense changing, be aware. This is for us who like email...\n"
"\n"
" Please direct curl questions, feature requests and trouble reports to one of\n"