libs: updated to cURL 7.67

Remy Marquis 2019-12-20 12:07:02 +01:00
parent 2a1300b32d
commit c4d38c2dcf
1309 changed files with 48231 additions and 32676 deletions

curl/CHANGES: 13341 changed lines (diff suppressed because it is too large)


@ -29,7 +29,7 @@ if(CURL_HIDDEN_SYMBOLS)
set(_CFLAG_SYMBOLS_HIDE "-xldscope=hidden")
elseif(CMAKE_C_COMPILER_ID MATCHES "Intel" AND NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 9.0)
# note: this should probably just check for version 9.1.045 but I'm not 100% sure
# so let's to it the same way autotools do.
# so let's do it the same way autotools do.
set(SUPPORTS_SYMBOL_HIDING TRUE)
set(_SYMBOL_EXTERN "__attribute__ ((__visibility__ (\"default\")))")
set(_CFLAG_SYMBOLS_HIDE "-fvisibility=hidden")
@ -53,7 +53,7 @@ elseif(MSVC)
message(WARNING "Hiding private symbols regardless CURL_HIDDEN_SYMBOLS being disabled.")
set(HIDES_CURL_PRIVATE_SYMBOLS TRUE)
endif()
elseif()
else()
set(HIDES_CURL_PRIVATE_SYMBOLS FALSE)
endif()
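
Editor's note (not part of the diff): on GCC/Clang-style compilers the hidden-symbols machinery above boils down to compiling with -fvisibility=hidden and tagging only the intended public entry points with the "default" visibility attribute, roughly as in this minimal sketch with made-up function names:

```c
/* build sketch: cc -shared -fPIC -fvisibility=hidden example.c -o libexample.so */

/* mirrors _SYMBOL_EXTERN above: this symbol remains visible to users of the
   shared library even though everything else defaults to hidden */
#define EXAMPLE_EXTERN __attribute__((__visibility__("default")))

EXAMPLE_EXTERN int example_public_add(int a, int b)
{
  return a + b;
}

/* no annotation: with -fvisibility=hidden this function is not exported */
int example_internal_helper(int x)
{
  return x * 2;
}
```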


@ -5,7 +5,7 @@
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2014, Daniel Stenberg, <daniel@haxx.se>, et al.
* Copyright (C) 1998 - 2019, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
@ -125,6 +125,7 @@ int main(void)
#if defined(HAVE_GETHOSTBYADDR_R_5) || \
defined(HAVE_GETHOSTBYADDR_R_5_REENTRANT)
rc = gethostbyaddr_r(address, length, type, &h, &hdata);
(void)rc;
#elif defined(HAVE_GETHOSTBYADDR_R_7) || \
defined(HAVE_GETHOSTBYADDR_R_7_REENTRANT)
hp = gethostbyaddr_r(address, length, type, &h, buffer, 8192, &h_errnop);
@ -132,6 +133,7 @@ int main(void)
#elif defined(HAVE_GETHOSTBYADDR_R_8) || \
defined(HAVE_GETHOSTBYADDR_R_8_REENTRANT)
rc = gethostbyaddr_r(address, length, type, &h, buffer, 8192, &hp, &h_errnop);
(void)rc;
#endif
#if defined(HAVE_GETHOSTBYNAME_R_3) || \
@ -240,6 +242,7 @@ int main()
#ifndef inet_ntoa_r
func_type func;
func = (func_type)inet_ntoa_r;
(void)func;
#endif
return 0;
}
@ -255,6 +258,7 @@ int main()
#ifndef inet_ntoa_r
func_type func;
func = (func_type)&inet_ntoa_r;
(void)func;
#endif
return 0;
}
@ -553,8 +557,8 @@ main() {
#include <time.h>
int
main() {
struct timespec ts = {0, 0};
clock_gettime(CLOCK_MONOTONIC, &ts);
struct timespec ts = {0, 0};
clock_gettime(CLOCK_MONOTONIC, &ts);
return 0;
}
#endif
@ -565,3 +569,49 @@ main() {
return 0;
}
#endif
#ifdef HAVE_VARIADIC_MACROS_C99
#define c99_vmacro3(first, ...) fun3(first, __VA_ARGS__)
#define c99_vmacro2(first, ...) fun2(first, __VA_ARGS__)
int fun3(int arg1, int arg2, int arg3);
int fun2(int arg1, int arg2);
int fun3(int arg1, int arg2, int arg3) {
return arg1 + arg2 + arg3;
}
int fun2(int arg1, int arg2) {
return arg1 + arg2;
}
int
main() {
int res3 = c99_vmacro3(1, 2, 3);
int res2 = c99_vmacro2(1, 2);
(void)res3;
(void)res2;
return 0;
}
#endif
#ifdef HAVE_VARIADIC_MACROS_GCC
#define gcc_vmacro3(first, args...) fun3(first, args)
#define gcc_vmacro2(first, args...) fun2(first, args)
int fun3(int arg1, int arg2, int arg3);
int fun2(int arg1, int arg2);
int fun3(int arg1, int arg2, int arg3) {
return arg1 + arg2 + arg3;
}
int fun2(int arg1, int arg2) {
return arg1 + arg2;
}
int
main() {
int res3 = gcc_vmacro3(1, 2, 3);
int res2 = gcc_vmacro2(1, 2);
(void)res3;
(void)res2;
return 0;
}
#endif


@ -62,6 +62,7 @@ if(NOT _GSS_FOUND) #not found by pkg-config. Let's take more traditional approac
COMMAND ${_GSS_CONFIGURE_SCRIPT} "--cflags" "gssapi"
OUTPUT_VARIABLE _GSS_CFLAGS
RESULT_VARIABLE _GSS_CONFIGURE_FAILED
OUTPUT_STRIP_TRAILING_WHITESPACE
)
message(STATUS "CFLAGS: ${_GSS_CFLAGS}")
if(NOT _GSS_CONFIGURE_FAILED) # 0 means success
@ -84,6 +85,7 @@ if(NOT _GSS_FOUND) #not found by pkg-config. Let's take more traditional approac
COMMAND ${_GSS_CONFIGURE_SCRIPT} "--libs" "gssapi"
OUTPUT_VARIABLE _GSS_LIB_FLAGS
RESULT_VARIABLE _GSS_CONFIGURE_FAILED
OUTPUT_STRIP_TRAILING_WHITESPACE
)
message(STATUS "LDFLAGS: ${_GSS_LIB_FLAGS}")
@ -110,6 +112,7 @@ if(NOT _GSS_FOUND) #not found by pkg-config. Let's take more traditional approac
COMMAND ${_GSS_CONFIGURE_SCRIPT} "--version"
OUTPUT_VARIABLE _GSS_VERSION
RESULT_VARIABLE _GSS_CONFIGURE_FAILED
OUTPUT_STRIP_TRAILING_WHITESPACE
)
# older versions may not have the "--version" parameter. In this case we just don't care.
@ -121,6 +124,7 @@ if(NOT _GSS_FOUND) #not found by pkg-config. Let's take more traditional approac
COMMAND ${_GSS_CONFIGURE_SCRIPT} "--vendor"
OUTPUT_VARIABLE _GSS_VENDOR
RESULT_VARIABLE _GSS_CONFIGURE_FAILED
OUTPUT_STRIP_TRAILING_WHITESPACE
)
# older versions may not have the "--vendor" parameter. In this case we just don't care.
@ -134,7 +138,7 @@ if(NOT _GSS_FOUND) #not found by pkg-config. Let's take more traditional approac
endif()
endif()
else() # either there is no config script or we are on platform that doesn't provide one (Windows?)
else() # either there is no config script or we are on a platform that doesn't provide one (Windows?)
find_path(_GSS_INCLUDE_DIR
NAMES
@ -164,7 +168,7 @@ if(NOT _GSS_FOUND) #not found by pkg-config. Let's take more traditional approac
set(CMAKE_REQUIRED_DEFINITIONS "")
endif()
else()
# I'm not convienced if this is the right way but this is what autotools do at the moment
# I'm not convinced if this is the right way but this is what autotools do at the moment
find_path(_GSS_INCLUDE_DIR
NAMES
"gssapi.h"


@ -24,6 +24,8 @@ else()
add_header_include(HAVE_SYS_SOCKET_H "sys/socket.h")
endif()
set(CMAKE_TRY_COMPILE_TARGET_TYPE STATIC_LIBRARY)
check_c_source_compiles("${_source_epilogue}
int main(void) {
recv(0, 0, 0, 0);
@ -177,23 +179,6 @@ int main(void) {
return 0;
}" HAVE_STRUCT_TIMEVAL)
include(CheckCSourceRuns)
# See HAVE_POLL in CMakeLists.txt for why poll is disabled on macOS
if(NOT APPLE)
set(CMAKE_REQUIRED_FLAGS)
if(HAVE_SYS_POLL_H)
set(CMAKE_REQUIRED_FLAGS "-DHAVE_SYS_POLL_H")
endif()
check_c_source_runs("
#ifdef HAVE_SYS_POLL_H
# include <sys/poll.h>
#endif
int main(void) {
return poll((void *)0, 0, 10 /*ms*/);
}" HAVE_POLL_FINE)
endif()
set(HAVE_SIG_ATOMIC_T 1)
set(CMAKE_REQUIRED_FLAGS)
if(HAVE_SIGNAL_H)
@ -229,3 +214,51 @@ check_type_size("struct sockaddr_storage" SIZEOF_STRUCT_SOCKADDR_STORAGE)
if(HAVE_SIZEOF_STRUCT_SOCKADDR_STORAGE)
set(HAVE_STRUCT_SOCKADDR_STORAGE 1)
endif()
unset(CMAKE_TRY_COMPILE_TARGET_TYPE)
if(NOT DEFINED CMAKE_TOOLCHAIN_FILE)
# if not cross-compilation...
include(CheckCSourceRuns)
set(CMAKE_REQUIRED_FLAGS "")
if(HAVE_SYS_POLL_H)
set(CMAKE_REQUIRED_FLAGS "-DHAVE_SYS_POLL_H")
elseif(HAVE_POLL_H)
set(CMAKE_REQUIRED_FLAGS "-DHAVE_POLL_H")
endif()
check_c_source_runs("
#include <stdlib.h>
#include <sys/time.h>
#ifdef HAVE_SYS_POLL_H
# include <sys/poll.h>
#elif HAVE_POLL_H
# include <poll.h>
#endif
int main(void)
{
if(0 != poll(0, 0, 10)) {
return 1; /* fail */
}
else {
/* detect the 10.12 poll() breakage */
struct timeval before, after;
int rc;
size_t us;
gettimeofday(&before, NULL);
rc = poll(NULL, 0, 500);
gettimeofday(&after, NULL);
us = (after.tv_sec - before.tv_sec) * 1000000 +
(after.tv_usec - before.tv_usec);
if(us < 400000) {
return 1;
}
}
return 0;
}" HAVE_POLL_FINE)
endif()


@ -7,7 +7,6 @@ if(NOT UNIX)
set(HAVE_LIBNSL 0)
set(HAVE_GETHOSTNAME 1)
set(HAVE_LIBZ 0)
set(HAVE_LIBCRYPTO 0)
set(HAVE_DLOPEN 0)


@ -5,7 +5,7 @@
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) 1998 - 2018, Daniel Stenberg, <daniel@haxx.se>, et al.
# Copyright (C) 1998 - 2019, Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
@ -57,7 +57,6 @@ string(REGEX MATCH "#define LIBCURL_VERSION_NUM 0x[0-9a-fA-F]+"
CURL_VERSION_NUM ${CURL_VERSION_H_CONTENTS})
string(REGEX REPLACE "[^0]+0x" "" CURL_VERSION_NUM ${CURL_VERSION_NUM})
include_regular_expression("^.*$") # Sukender: Is it necessary?
# Setup package meta-data
# SET(PACKAGE "curl")
@ -70,7 +69,6 @@ message(STATUS "curl version=[${CURL_VERSION}]")
set(OPERATING_SYSTEM "${CMAKE_SYSTEM_NAME}")
set(OS "\"${CMAKE_SYSTEM_NAME}\"")
include_directories(${PROJECT_BINARY_DIR}/include/curl)
include_directories(${CURL_SOURCE_DIR}/include)
option(CURL_WERROR "Turn compiler warnings into errors" OFF)
@ -105,11 +103,7 @@ endif()
if(ENABLE_DEBUG)
# DEBUGBUILD will be defined only for Debug builds
if(NOT CMAKE_VERSION VERSION_LESS 3.0)
set_property(DIRECTORY APPEND PROPERTY COMPILE_DEFINITIONS $<$<CONFIG:Debug>:DEBUGBUILD>)
else()
set_property(DIRECTORY APPEND PROPERTY COMPILE_DEFINITIONS_DEBUG DEBUGBUILD)
endif()
set_property(DIRECTORY APPEND PROPERTY COMPILE_DEFINITIONS $<$<CONFIG:Debug>:DEBUGBUILD>)
set(ENABLE_CURLDEBUG ON)
endif()
@ -247,7 +241,6 @@ include(CheckLibraryExists)
include(CheckSymbolExists)
include(CheckTypeSize)
include(CheckCSourceCompiles)
include(CMakeDependentOption)
# On windows preload settings
if(WIN32)
@ -267,7 +260,7 @@ if(ENABLE_THREADED_RESOLVER)
endif()
# Check for all needed libraries
check_library_exists_concat("dl" dlopen HAVE_LIBDL)
check_library_exists_concat("${CMAKE_DL_LIBS}" dlopen HAVE_LIBDL)
check_library_exists_concat("socket" connect HAVE_LIBSOCKET)
check_library_exists("c" gethostbyname "" NOT_NEED_LIBNSL)
@ -294,7 +287,7 @@ endif()
# TODO support GNUTLS, NSS, POLARSSL, CYASSL
if(APPLE)
option(CMAKE_USE_DARWINSSL "enable Apple OS native SSL/TLS" OFF)
option(CMAKE_USE_SECTRANSP "enable Apple OS native SSL/TLS" OFF)
endif()
if(WIN32)
option(CMAKE_USE_WINSSL "enable Windows native SSL/TLS" OFF)
@ -304,14 +297,14 @@ endif()
option(CMAKE_USE_MBEDTLS "Enable mbedTLS for SSL/TLS" OFF)
set(openssl_default ON)
if(WIN32 OR CMAKE_USE_DARWINSSL OR CMAKE_USE_WINSSL OR CMAKE_USE_MBEDTLS)
if(WIN32 OR CMAKE_USE_SECTRANSP OR CMAKE_USE_WINSSL OR CMAKE_USE_MBEDTLS)
set(openssl_default OFF)
endif()
option(CMAKE_USE_OPENSSL "Use OpenSSL code. Experimental" ${openssl_default})
count_true(enabled_ssl_options_count
CMAKE_USE_WINSSL
CMAKE_USE_DARWINSSL
CMAKE_USE_SECTRANSP
CMAKE_USE_OPENSSL
CMAKE_USE_MBEDTLS
)
@ -331,6 +324,10 @@ if(CURL_WINDOWS_SSPI)
endif()
if(CMAKE_USE_DARWINSSL)
message(FATAL_ERROR "The cmake option CMAKE_USE_DARWINSSL was renamed to CMAKE_USE_SECTRANSP.")
endif()
if(CMAKE_USE_SECTRANSP)
find_library(COREFOUNDATION_FRAMEWORK "CoreFoundation")
if(NOT COREFOUNDATION_FRAMEWORK)
message(FATAL_ERROR "CoreFoundation framework not found")
@ -342,7 +339,7 @@ if(CMAKE_USE_DARWINSSL)
endif()
set(SSL_ENABLED ON)
set(USE_DARWINSSL ON)
set(USE_SECTRANSP ON)
list(APPEND CURL_LIBS "${COREFOUNDATION_FRAMEWORK}" "${SECURITY_FRAMEWORK}")
endif()
@ -350,8 +347,6 @@ if(CMAKE_USE_OPENSSL)
find_package(OpenSSL REQUIRED)
set(SSL_ENABLED ON)
set(USE_OPENSSL ON)
set(HAVE_LIBCRYPTO ON)
set(HAVE_LIBSSL ON)
# Depend on OpenSSL via imported targets if supported by the running
# version of CMake. This allows our dependents to get our dependencies
@ -476,6 +471,7 @@ if(NOT CURL_DISABLE_LDAP)
list(APPEND CMAKE_REQUIRED_LIBRARIES ${CMAKE_LBER_LIB})
endif()
check_c_source_compiles("${_SRC_STRING}" NOT_NEED_LBER_H)
unset(CMAKE_REQUIRED_LIBRARIES)
if(NOT_NEED_LBER_H)
set(NEED_LBER_H OFF)
@ -532,7 +528,7 @@ endif()
option(CURL_BROTLI "Set to ON to enable building curl with brotli support." OFF)
set(HAVE_BROTLI OFF)
if(CURL_BROTLI)
find_package(BROTLI QUIET)
find_package(Brotli QUIET)
if(BROTLI_FOUND)
set(HAVE_BROTLI ON)
list(APPEND CURL_LIBS ${BROTLI_LIBRARIES})
@ -571,6 +567,7 @@ if(CMAKE_USE_LIBSSH2)
check_function_exists(libssh2_scp_send64 HAVE_LIBSSH2_SCP_SEND64)
check_function_exists(libssh2_session_handshake HAVE_LIBSSH2_SESSION_HANDSHAKE)
set(CMAKE_EXTRA_INCLUDE_FILES "")
unset(CMAKE_REQUIRED_LIBRARIES)
endif()
endif()
@ -618,6 +615,7 @@ if(CMAKE_USE_GSSAPI)
if(NOT HAVE_GSS_C_NT_HOSTBASED_SERVICE)
set(HAVE_OLD_GSSMIT ON)
endif()
unset(CMAKE_REQUIRED_LIBRARIES)
endif()
@ -828,12 +826,8 @@ endif()
check_symbol_exists(basename "${CURL_INCLUDES}" HAVE_BASENAME)
check_symbol_exists(socket "${CURL_INCLUDES}" HAVE_SOCKET)
# poll on macOS is unreliable, it first did not exist, then was broken until
# fixed in 10.9 only to break again in 10.12.
if(NOT APPLE)
check_symbol_exists(poll "${CURL_INCLUDES}" HAVE_POLL)
endif()
check_symbol_exists(select "${CURL_INCLUDES}" HAVE_SELECT)
check_symbol_exists(poll "${CURL_INCLUDES}" HAVE_POLL)
check_symbol_exists(strdup "${CURL_INCLUDES}" HAVE_STRDUP)
check_symbol_exists(strstr "${CURL_INCLUDES}" HAVE_STRSTR)
check_symbol_exists(strtok_r "${CURL_INCLUDES}" HAVE_STRTOK_R)
@ -864,6 +858,7 @@ check_symbol_exists(strlcat "${CURL_INCLUDES}" HAVE_STRLCAT)
check_symbol_exists(getpwuid "${CURL_INCLUDES}" HAVE_GETPWUID)
check_symbol_exists(getpwuid_r "${CURL_INCLUDES}" HAVE_GETPWUID_R)
check_symbol_exists(geteuid "${CURL_INCLUDES}" HAVE_GETEUID)
check_symbol_exists(usleep "${CURL_INCLUDES}" HAVE_USLEEP)
check_symbol_exists(utime "${CURL_INCLUDES}" HAVE_UTIME)
check_symbol_exists(gmtime_r "${CURL_INCLUDES}" HAVE_GMTIME_R)
check_symbol_exists(localtime_r "${CURL_INCLUDES}" HAVE_LOCALTIME_R)
@ -889,6 +884,9 @@ check_symbol_exists(freeifaddrs "${CURL_INCLUDES}" HAVE_FREEIFADDRS)
check_symbol_exists(pipe "${CURL_INCLUDES}" HAVE_PIPE)
check_symbol_exists(ftruncate "${CURL_INCLUDES}" HAVE_FTRUNCATE)
check_symbol_exists(getprotobyname "${CURL_INCLUDES}" HAVE_GETPROTOBYNAME)
check_symbol_exists(getpeername "${CURL_INCLUDES}" HAVE_GETPEERNAME)
check_symbol_exists(getsockname "${CURL_INCLUDES}" HAVE_GETSOCKNAME)
check_symbol_exists(if_nametoindex "${CURL_INCLUDES}" HAVE_IF_NAMETOINDEX)
check_symbol_exists(getrlimit "${CURL_INCLUDES}" HAVE_GETRLIMIT)
check_symbol_exists(setlocale "${CURL_INCLUDES}" HAVE_SETLOCALE)
check_symbol_exists(setmode "${CURL_INCLUDES}" HAVE_SETMODE)
@ -972,6 +970,8 @@ foreach(CURL_TEST
HAVE_INET_NTOA_R_DECL_REENTRANT
HAVE_GETADDRINFO
HAVE_FILE_OFFSET_BITS
HAVE_VARIADIC_MACROS_C99
HAVE_VARIADIC_MACROS_GCC
)
curl_internal_test(${CURL_TEST})
endforeach()
@ -1133,7 +1133,7 @@ if(CURL_WERROR)
endif()
# Ugly (but functional) way to include "Makefile.inc" by transforming it (= regenerate it).
function(TRANSFORM_MAKEFILE_INC INPUT_FILE OUTPUT_FILE)
function(transform_makefile_inc INPUT_FILE OUTPUT_FILE)
file(READ ${INPUT_FILE} MAKEFILE_INC_TEXT)
string(REPLACE "$(top_srcdir)" "\${CURL_SOURCE_DIR}" MAKEFILE_INC_TEXT ${MAKEFILE_INC_TEXT})
string(REPLACE "$(top_builddir)" "\${CURL_BINARY_DIR}" MAKEFILE_INC_TEXT ${MAKEFILE_INC_TEXT})
@ -1182,10 +1182,7 @@ endfunction()
# Clear list and try to detect available features
set(_items)
_add_if("WinSSL" SSL_ENABLED AND USE_WINDOWS_SSPI)
_add_if("OpenSSL" SSL_ENABLED AND USE_OPENSSL)
_add_if("DarwinSSL" SSL_ENABLED AND USE_DARWINSSL)
_add_if("mbedTLS" SSL_ENABLED AND USE_MBEDTLS)
_add_if("SSL" SSL_ENABLED)
_add_if("IPv6" ENABLE_IPV6)
_add_if("unix-sockets" USE_UNIX_SOCKETS)
_add_if("libz" HAVE_LIBZ)
@ -1203,7 +1200,7 @@ _add_if("Kerberos" NOT CURL_DISABLE_CRYPTO_AUTH AND
(HAVE_GSSAPI OR USE_WINDOWS_SSPI))
# NTLM support requires crypto function adaptions from various SSL libs
# TODO alternative SSL libs tests for SSP1, GNUTLS, NSS
if(NOT CURL_DISABLE_CRYPTO_AUTH AND (USE_OPENSSL OR USE_WINDOWS_SSPI OR USE_DARWINSSL OR USE_MBEDTLS))
if(NOT CURL_DISABLE_CRYPTO_AUTH AND (USE_OPENSSL OR USE_WINDOWS_SSPI OR USE_SECTRANSP OR USE_MBEDTLS))
_add_if("NTLM" 1)
# TODO missing option (autoconf: --enable-ntlm-wb)
_add_if("NTLM_WB" NOT CURL_DISABLE_HTTP AND NTLM_WB_ENABLED)
@ -1242,10 +1239,24 @@ _add_if("SCP" USE_LIBSSH2)
_add_if("SFTP" USE_LIBSSH2)
_add_if("RTSP" NOT CURL_DISABLE_RTSP)
_add_if("RTMP" USE_LIBRTMP)
list(SORT _items)
if(_items)
list(SORT _items)
endif()
string(REPLACE ";" " " SUPPORT_PROTOCOLS "${_items}")
message(STATUS "Enabled protocols: ${SUPPORT_PROTOCOLS}")
# Clear list and collect SSL backends
set(_items)
_add_if("WinSSL" SSL_ENABLED AND USE_WINDOWS_SSPI)
_add_if("OpenSSL" SSL_ENABLED AND USE_OPENSSL)
_add_if("Secure Transport" SSL_ENABLED AND USE_SECTRANSP)
_add_if("mbedTLS" SSL_ENABLED AND USE_MBEDTLS)
if(_items)
list(SORT _items)
endif()
string(REPLACE ";" " " SSL_BACKENDS "${_items}")
message(STATUS "Enabled SSL backends: ${SSL_BACKENDS}")
# curl-config needs the following options to be set.
set(CC "${CMAKE_C_COMPILER}")
# TODO probably put a -D... options here?


@ -1,6 +1,6 @@
COPYRIGHT AND PERMISSION NOTICE
Copyright (c) 1996 - 2018, Daniel Stenberg, <daniel@haxx.se>, and many
Copyright (c) 1996 - 2019, Daniel Stenberg, <daniel@haxx.se>, and many
contributors, see the THANKS file.
All rights reserved.


@ -154,10 +154,21 @@ VC_DIST = projects/README \
WINBUILD_DIST = winbuild/BUILD.WINDOWS.txt winbuild/gen_resp_file.bat \
winbuild/MakefileBuild.vc winbuild/Makefile.vc
PLAN9_DIST = plan9/include/mkfile \
plan9/include/mkfile \
plan9/mkfile.proto \
plan9/mkfile \
plan9/BUILD.PLAN9.txt \
plan9/lib/mkfile.inc \
plan9/lib/mkfile \
plan9/src/mkfile.inc \
plan9/src/mkfile
EXTRA_DIST = CHANGES COPYING maketgz Makefile.dist curl-config.in \
RELEASE-NOTES buildconf libcurl.pc.in MacOSX-Framework scripts/zsh.pl \
scripts/updatemanpages.pl $(CMAKE_DIST) $(VC_DIST) $(WINBUILD_DIST) \
lib/libcurl.vers.in buildconf.bat scripts/coverage.sh
RELEASE-NOTES buildconf libcurl.pc.in MacOSX-Framework \
scripts/updatemanpages.pl $(CMAKE_DIST) \
$(VC_DIST) $(WINBUILD_DIST) $(PLAN9_DIST) \
lib/libcurl.vers.in buildconf.bat scripts/coverage.sh scripts/completion.pl
CLEANFILES = $(VC6_LIBDSP) $(VC6_SRCDSP) $(VC7_LIBVCPROJ) $(VC7_SRCVCPROJ) \
$(VC71_LIBVCPROJ) $(VC71_SRCVCPROJ) $(VC8_LIBVCPROJ) $(VC8_SRCVCPROJ) \


@ -43,7 +43,7 @@
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) 1998 - 2018, Daniel Stenberg, <daniel@haxx.se>, et al.
# Copyright (C) 1998 - 2019, Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
@ -144,8 +144,7 @@ build_triplet = @build@
host_triplet = @host@
subdir = .
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
am__aclocal_m4_deps = $(top_srcdir)/m4/ax_code_coverage.m4 \
$(top_srcdir)/m4/ax_compile_check_sizeof.m4 \
am__aclocal_m4_deps = $(top_srcdir)/m4/ax_compile_check_sizeof.m4 \
$(top_srcdir)/m4/curl-compilers.m4 \
$(top_srcdir)/m4/curl-confopts.m4 \
$(top_srcdir)/m4/curl-functions.m4 \
@ -319,12 +318,6 @@ CC = @CC@
CCDEPMODE = @CCDEPMODE@
CFLAGS = @CFLAGS@
CFLAG_CURL_SYMBOL_HIDING = @CFLAG_CURL_SYMBOL_HIDING@
CODE_COVERAGE_CFLAGS = @CODE_COVERAGE_CFLAGS@
CODE_COVERAGE_CPPFLAGS = @CODE_COVERAGE_CPPFLAGS@
CODE_COVERAGE_CXXFLAGS = @CODE_COVERAGE_CXXFLAGS@
CODE_COVERAGE_ENABLED = @CODE_COVERAGE_ENABLED@
CODE_COVERAGE_LDFLAGS = @CODE_COVERAGE_LDFLAGS@
CODE_COVERAGE_LIBS = @CODE_COVERAGE_LIBS@
CONFIGURE_OPTIONS = @CONFIGURE_OPTIONS@
CPP = @CPP@
CPPFLAGS = @CPPFLAGS@
@ -366,14 +359,15 @@ ENABLE_SHARED = @ENABLE_SHARED@
ENABLE_STATIC = @ENABLE_STATIC@
EXEEXT = @EXEEXT@
FGREP = @FGREP@
FISH_FUNCTIONS_DIR = @FISH_FUNCTIONS_DIR@
GCOV = @GCOV@
GENHTML = @GENHTML@
GREP = @GREP@
HAVE_BROTLI = @HAVE_BROTLI@
HAVE_GNUTLS_SRP = @HAVE_GNUTLS_SRP@
HAVE_LDAP_SSL = @HAVE_LDAP_SSL@
HAVE_LIBZ = @HAVE_LIBZ@
HAVE_OPENSSL_SRP = @HAVE_OPENSSL_SRP@
HAVE_PROTO_BSDSOCKET_H = @HAVE_PROTO_BSDSOCKET_H@
IDN_ENABLED = @IDN_ENABLED@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
@ -434,8 +428,6 @@ STRIP = @STRIP@
SUPPORT_FEATURES = @SUPPORT_FEATURES@
SUPPORT_PROTOCOLS = @SUPPORT_PROTOCOLS@
USE_ARES = @USE_ARES@
USE_CYASSL = @USE_CYASSL@
USE_DARWINSSL = @USE_DARWINSSL@
USE_GNUTLS = @USE_GNUTLS@
USE_GNUTLS_NETTLE = @USE_GNUTLS_NETTLE@
USE_LIBRTMP = @USE_LIBRTMP@
@ -444,12 +436,17 @@ USE_LIBSSH2 = @USE_LIBSSH2@
USE_MBEDTLS = @USE_MBEDTLS@
USE_MESALINK = @USE_MESALINK@
USE_NGHTTP2 = @USE_NGHTTP2@
USE_NGHTTP3 = @USE_NGHTTP3@
USE_NGTCP2 = @USE_NGTCP2@
USE_NGTCP2_CRYPTO_OPENSSL = @USE_NGTCP2_CRYPTO_OPENSSL@
USE_NSS = @USE_NSS@
USE_OPENLDAP = @USE_OPENLDAP@
USE_POLARSSL = @USE_POLARSSL@
USE_QUICHE = @USE_QUICHE@
USE_SCHANNEL = @USE_SCHANNEL@
USE_SECTRANSP = @USE_SECTRANSP@
USE_UNIX_SOCKETS = @USE_UNIX_SOCKETS@
USE_WINDOWS_SSPI = @USE_WINDOWS_SSPI@
USE_WOLFSSL = @USE_WOLFSSL@
VERSION = @VERSION@
VERSIONNUM = @VERSIONNUM@
ZLIB_LIBS = @ZLIB_LIBS@
@ -631,10 +628,21 @@ VC_DIST = projects/README \
WINBUILD_DIST = winbuild/BUILD.WINDOWS.txt winbuild/gen_resp_file.bat \
winbuild/MakefileBuild.vc winbuild/Makefile.vc
PLAN9_DIST = plan9/include/mkfile \
plan9/include/mkfile \
plan9/mkfile.proto \
plan9/mkfile \
plan9/BUILD.PLAN9.txt \
plan9/lib/mkfile.inc \
plan9/lib/mkfile \
plan9/src/mkfile.inc \
plan9/src/mkfile
EXTRA_DIST = CHANGES COPYING maketgz Makefile.dist curl-config.in \
RELEASE-NOTES buildconf libcurl.pc.in MacOSX-Framework scripts/zsh.pl \
scripts/updatemanpages.pl $(CMAKE_DIST) $(VC_DIST) $(WINBUILD_DIST) \
lib/libcurl.vers.in buildconf.bat scripts/coverage.sh
RELEASE-NOTES buildconf libcurl.pc.in MacOSX-Framework \
scripts/updatemanpages.pl $(CMAKE_DIST) \
$(VC_DIST) $(WINBUILD_DIST) $(PLAN9_DIST) \
lib/libcurl.vers.in buildconf.bat scripts/coverage.sh scripts/completion.pl
CLEANFILES = $(VC6_LIBDSP) $(VC6_SRCDSP) $(VC7_LIBVCPROJ) $(VC7_SRCVCPROJ) \
$(VC71_LIBVCPROJ) $(VC71_SRCVCPROJ) $(VC8_LIBVCPROJ) $(VC8_SRCVCPROJ) \
@ -655,14 +663,17 @@ LIB_VAUTH_CFILES = vauth/vauth.c vauth/cleartext.c vauth/cram.c \
LIB_VAUTH_HFILES = vauth/vauth.h vauth/digest.h vauth/ntlm.h
LIB_VTLS_CFILES = vtls/openssl.c vtls/gtls.c vtls/vtls.c vtls/nss.c \
vtls/polarssl.c vtls/polarssl_threadlock.c \
vtls/cyassl.c vtls/schannel.c vtls/schannel_verify.c \
vtls/darwinssl.c vtls/gskit.c vtls/mbedtls.c vtls/mesalink.c
vtls/wolfssl.c vtls/schannel.c vtls/schannel_verify.c \
vtls/sectransp.c vtls/gskit.c vtls/mbedtls.c vtls/mesalink.c
LIB_VTLS_HFILES = vtls/openssl.h vtls/vtls.h vtls/gtls.h \
vtls/nssg.h vtls/polarssl.h vtls/polarssl_threadlock.h \
vtls/cyassl.h vtls/schannel.h vtls/darwinssl.h vtls/gskit.h \
vtls/wolfssl.h vtls/schannel.h vtls/sectransp.h vtls/gskit.h \
vtls/mbedtls.h vtls/mesalink.h
LIB_VQUIC_CFILES = vquic/ngtcp2.c vquic/quiche.c
LIB_VQUIC_HFILES = vquic/ngtcp2.h vquic/quiche.h
LIB_VSSH_CFILES = vssh/libssh2.c vssh/libssh.c
LIB_CFILES = file.c timeval.c base64.c hostip.c progress.c formdata.c \
cookie.c http.c sendf.c ftp.c url.c dict.c if2ip.c speedcheck.c \
ldap.c version.c getenv.c escape.c mprintf.c telnet.c netrc.c \
@ -672,16 +683,16 @@ LIB_CFILES = file.c timeval.c base64.c hostip.c progress.c formdata.c \
http_digest.c md4.c md5.c http_negotiate.c inet_pton.c strtoofft.c \
strerror.c amigaos.c hostasyn.c hostip4.c hostip6.c hostsyn.c \
inet_ntop.c parsedate.c select.c tftp.c splay.c strdup.c socks.c \
ssh.c ssh-libssh.c curl_addrinfo.c socks_gssapi.c socks_sspi.c \
curl_addrinfo.c socks_gssapi.c socks_sspi.c \
curl_sspi.c slist.c nonblock.c curl_memrchr.c imap.c pop3.c smtp.c \
pingpong.c rtsp.c curl_threads.c warnless.c hmac.c curl_rtmp.c \
openldap.c curl_gethostname.c gopher.c idn_win32.c \
http_proxy.c non-ascii.c asyn-ares.c asyn-thread.c curl_gssapi.c \
http_ntlm.c curl_ntlm_wb.c curl_ntlm_core.c curl_sasl.c rand.c \
curl_multibyte.c hostcheck.c conncache.c pipeline.c dotdot.c \
curl_multibyte.c hostcheck.c conncache.c dotdot.c \
x509asn1.c http2.c smb.c curl_endian.c curl_des.c system_win32.c \
mime.c sha256.c setopt.c curl_path.c curl_ctype.c curl_range.c psl.c \
doh.c urlapi.c
doh.c urlapi.c curl_get_line.c altsvc.c socketpair.c
LIB_HFILES = arpa_telnet.h netrc.h file.h timeval.h hostip.h progress.h \
formdata.h cookie.h http.h sendf.h ftp.h url.h dict.h if2ip.h \
@ -698,14 +709,19 @@ LIB_HFILES = arpa_telnet.h netrc.h file.h timeval.h hostip.h progress.h \
curl_gethostname.h gopher.h http_proxy.h non-ascii.h asyn.h \
http_ntlm.h curl_gssapi.h curl_ntlm_wb.h curl_ntlm_core.h \
curl_sasl.h curl_multibyte.h hostcheck.h conncache.h \
curl_setup_once.h multihandle.h setup-vms.h pipeline.h dotdot.h \
curl_setup_once.h multihandle.h setup-vms.h dotdot.h \
x509asn1.h http2.h sigpipe.h smb.h curl_endian.h curl_des.h \
curl_printf.h system_win32.h rand.h mime.h curl_sha256.h setopt.h \
curl_path.h curl_ctype.h curl_range.h psl.h doh.h urlapi-int.h
curl_path.h curl_ctype.h curl_range.h psl.h doh.h urlapi-int.h \
curl_get_line.h altsvc.h quic.h socketpair.h
LIB_RCFILES = libcurl.rc
CSOURCES = $(LIB_CFILES) $(LIB_VAUTH_CFILES) $(LIB_VTLS_CFILES)
HHEADERS = $(LIB_HFILES) $(LIB_VAUTH_HFILES) $(LIB_VTLS_HFILES)
CSOURCES = $(LIB_CFILES) $(LIB_VAUTH_CFILES) $(LIB_VTLS_CFILES) \
$(LIB_VQUIC_CFILES) $(LIB_VSSH_CFILES)
HHEADERS = $(LIB_HFILES) $(LIB_VAUTH_HFILES) $(LIB_VTLS_HFILES) \
$(LIB_VQUIC_HFILES)
# libcurl has sources that provide functions named curlx_* that aren't part of
# the official API, but we re-use the code here to avoid duplication.
@ -754,6 +770,7 @@ CURL_CFILES = \
tool_panykey.c \
tool_paramhlp.c \
tool_parsecfg.c \
tool_progress.c \
tool_strdup.c \
tool_setopt.c \
tool_sleep.c \
@ -795,6 +812,7 @@ CURL_HFILES = \
tool_panykey.h \
tool_paramhlp.h \
tool_parsecfg.h \
tool_progress.h \
tool_sdecls.h \
tool_setopt.h \
tool_setup.h \


@ -42,6 +42,12 @@ GIT
(you'll get a directory named curl created, filled with the source code)
SECURITY PROBLEMS
Report suspected security problems via our HackerOne page and not in public!
https://hackerone.com/curl
NOTICE
Curl contains pieces of source code that is Copyright (c) 1998, 1999


@ -1,98 +1,144 @@
curl and libcurl 7.63.0
curl and libcurl 7.67.0
Public curl releases: 178
Command line options: 219
curl_easy_setopt() options: 262
Public functions in libcurl: 80
Contributors: 1829
Public curl releases: 186
Command line options: 226
curl_easy_setopt() options: 269
Public functions in libcurl: 81
Contributors: 2056
This release includes the following changes:
o curl: add %{stderr} and %{stdout} for --write-out [24]
o curl: add undocumented option --dump-module-paths for win32 [19]
o setopt: add CURLOPT_CURLU [27]
o curl: added --no-progress-meter [73]
o setopt: CURLMOPT_MAX_CONCURRENT_STREAMS is new [55]
o urlapi: CURLU_NO_AUTHORITY allows empty authority/host part [22]
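
Editor's aside (not part of the diffed RELEASE-NOTES): a minimal sketch of how two of the new options listed above might be used from C, assuming a libcurl carrying this 7.67.0 update; "example-scheme" and the 50-stream cap are arbitrary illustration values.

```c
#include <curl/curl.h>

int main(void)
{
  curl_global_init(CURL_GLOBAL_DEFAULT);

  /* CURLMOPT_MAX_CONCURRENT_STREAMS: cap concurrent streams per connection */
  CURLM *multi = curl_multi_init();
  curl_multi_setopt(multi, CURLMOPT_MAX_CONCURRENT_STREAMS, 50L);

  /* CURLU_NO_AUTHORITY: accept a URL whose scheme omits the authority/host;
     "example-scheme" is a made-up scheme, hence CURLU_NON_SUPPORT_SCHEME too */
  CURLU *url = curl_url();
  curl_url_set(url, CURLUPART_URL, "example-scheme:/local/path",
               CURLU_NO_AUTHORITY | CURLU_NON_SUPPORT_SCHEME);

  curl_url_cleanup(url);
  curl_multi_cleanup(multi);
  curl_global_cleanup();
  return 0;
}
```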
This release includes the following bugfixes:
o (lib)curl.rc: fixup for minor bugs [63]
o CURLINFO_REDIRECT_URL: extract the Location: header field unvalidated [73]
o CURLOPT_HEADERFUNCTION.3: match 'nitems' name in synopsis and description [45]
o CURLOPT_WRITEFUNCTION.3: spell out that it gets called many times
o Curl_follow: accept non-supported schemes for "fake" redirects [9]
o KNOWN_BUGS: add --proxy-any connection issue [28]
o NTLM: Remove redundant ifdef USE_OPENSSL [41]
o NTLM: force the connection to HTTP/1.1 [67]
o OS400: add URL API ccsid wrappers and sync ILE/RPG bindings
o SECURITY-PROCESS: bountygraph shuts down again [50]
o TODO: Have the URL API offer IDN decoding [22]
o ares: remove fd from multi fd set when ares is about to close the fd [42]
o axtls: removed [1]
o checksrc: add COPYRIGHTYEAR check [62]
o cmake: fix MIT/Heimdal Kerberos detection [53]
o configure: include all libraries in ssl-libs fetch [55]
o configure: show CFLAGS, LDFLAGS etc in summary [7]
o connect: fix building for recent versions of Minix [52]
o cookies: create the cookiejar even if no cookies to save [48]
o cookies: expire "Max-Age=0" immediately [64]
o curl: --local-port range was not "including" [29]
o curl: fix --local-port integer overflow [25]
o curl: fix memory leak reading --writeout from file [51]
o curl: fixed UTF-8 in current console code page (Windows) [16]
o curl_easy_perform: fix timeout handling [49]
o curl_global_sslset(): id == -1 is not necessarily an error [68]
o curl_multibyte: fix a malloc overcalculation [18]
o curle: move deprecated error code to ifndef block [40]
o docs: curl_formadd field and file names are now escaped [72]
o docs: escape "\n" codes [26]
o doh: fix memory leak in OOM situation [56]
o doh: make it work for h2-disabled builds too [57]
o examples/ephiperfifo: report error when epoll_ctl fails
o ftp: avoid two unsigned int overflows in FTP listing parser [30]
o host names: allow trailing dot in name resolve, then strip it [46]
o http2: Upon HTTP_1_1_REQUIRED, retry the request with HTTP/1.1 [65]
o http: don't set CURLINFO_CONDITION_UNMET for http status code 204 [70]
o http: fix HTTP Digest auth to include query in URI [69]
o http_negotiate: do not close connection until negotiation is completed [36]
o impacket: add LICENSE [39]
o infof: clearly indicate truncation [14]
o ldap: fix LDAP URL parsing regressions [71]
o libcurl: stop reading from paused transfers [20]
o mprintf: avoid unsigned integer overflow warning [10]
o netrc: don't ignore the login name specified with "--user" [17]
o nss: Fall back to latest supported SSL version [60]
o nss: Fix compatibility with nss versions 3.14 to 3.15 [61]
o nss: fix fallthrough comment to fix picky compiler warning
o nss: remove version selecting dead code [33]
o nss: set default max-tls to 1.3/1.2 [32]
o openssl: Remove SSLEAY leftovers [37]
o openssl: do not log excess "TLS app data" lines for TLS 1.3 [34]
o openssl: do not use file BIOs if not requested [59]
o openssl: fix unused variable compiler warning with old openssl [66]
o openssl: support session resume with TLS 1.3 [44]
o openvms: fix example name [8]
o os400: Add curl_easy_conn_upkeep() to ILE/RPG binding
o os400: add CURLOPT_CURLU to ILE/RPG binding
o os400: fix return type of curl_easy_pause() in ILE/RPG binding
o packages: remove old leftover files and dirs [58]
o pop3: only do APOP with a valid timestamp [35]
o runtests: use the local curl for verifying [6]
o schannel: be consistent in Schannel capitalization [23]
o schannel: better CURLOPT_CERTINFO support [2]
o schannel: use Curl_ prefix for global private symbols [4]
o snprintf: renamed and we now only use msnprintf() [47]
o ssl: fix compilation with OpenSSL 0.9.7 [43]
o ssl: replace all internal uses of CURLE_SSL_CACERT [40]
o symbols-in-versions: add missing CURLU_ symbols [15]
o test328: verify Content-Encoding: none [54]
o tests: disable SO_EXCLUSIVEADDRUSE for stunnel on Windows
o tests: drop http_pipe.py script no longer used [5]
o tool_cb_wrt: Silence function cast compiler warning [31]
o tool_doswin: Fix uninitialized field warning [38]
o travis: build with clang sanitizers [3]
o travis: remove curl before a normal build [11]
o url: a short host name + port is not a scheme [13]
o url: fix IPv6 numeral address parser [12]
o urlapi: only skip encoding the first '=' with APPENDQUERY set [21]
o BINDINGS: five new bindings added
o CURLOPT_TIMEOUT.3: Clarify transfer timeout time includes queue time [78]
o CURLOPT_TIMEOUT.3: remove the mention of "minutes" [74]
o ESNI: initial build/setup support [71]
o FTP: FTPFILE_NOCWD: avoid redundant CWDs [28]
o FTP: allow "rubbish" prepended to the SIZE response [15]
o FTP: remove trailing slash from path for LIST/MLSD [6]
o FTP: skip CWD to entry dir when target is absolute [16]
o FTP: url-decode path before evaluation [36]
o HTTP3.md: move -p for mkdir, remove -j for make [46]
o HTTP3: fix invalid use of sendto for connected UDP socket [109]
o HTTP3: fix ngtcp2 Windows build [93]
o HTTP3: fix prefix parameter for ngtcp2 build [40]
o HTTP3: fix typo somehere1 > somewhere1 [108]
o HTTP3: show an --alt-svc using example too
o INSTALL: add missing space for configure commands [106]
o INSTALL: add vcpkg installation instructions [35]
o README: minor grammar fix [39]
o altsvc: accept quoted ma and persist values [60]
o altsvc: both backends run h3-23 now [31]
o appveyor: Add MSVC ARM64 build [87]
o appveyor: Use two parallel compilation on appveyor with CMake [98]
o appveyor: add --disable-proxy autotools build [94]
o appveyor: add 32-bit MinGW-w64 build [58]
o appveyor: add a winbuild [14]
o appveyor: add a winbuild that uses VS2017 [84]
o appveyor: make winbuilds with DEBUG=no/yes and VS 2015/2017 [95]
o appveyor: publish artifacts on appveyor [105]
o appveyor: upgrade VS2017 to VS2019 [29]
o asyn-thread: make use of Curl_socketpair() where available [85]
o asyn-thread: s/AF_LOCAL/AF_UNIX for Solaris [3]
o build: Remove unused HAVE_LIBSSL and HAVE_LIBCRYPTO defines [77]
o checksrc: fix uninitialized variable warning [57]
o chunked-encoding: stop hiding the CURLE_BAD_CONTENT_ENCODING error [56]
o cirrus: Increase the git clone depth
o cirrus: Switch the FreeBSD 11.x build to 11.3 and add a 13.0 build
o cirrus: switch off blackhole status on the freebsd CI machines [72]
o cleanups: 21 various PVS-Studio warnings [24]
o configure: only say ipv6 enabled when the variable is set [110]
o configure: remove all cyassl references [90]
o conn-reuse: requests wanting NTLM can reuse non-NTLM connections [99]
o connect: return CURLE_OPERATION_TIMEDOUT for errno == ETIMEDOUT [72]
o connect: silence sign-compare warning [83]
o cookie: avoid harmless use after free [69]
o cookie: pass in the correct cookie amount to qsort() [27]
o cookies: change argument type for Curl_flush_cookies [67]
o cookies: using a share with cookies shouldn't enable the cookie engine [63]
o copyrights: update copyright notices to 2019 [101]
o curl: create easy handles on-demand and not ahead of time [54]
o curl: ensure HTTP 429 triggers --retry [64]
o curl: exit the create_transfers loop on errors [33]
o curl: fix memory leaked by parse_metalink() [17]
o curl: load large files with -d @ much faster [19]
o docs/HTTP3: fix `--with-ssl` ngtcp2 configure flag [21]
o docs: added multi-event.c example [75]
o docs: disambiguate CURLUPART_HOST is for host name (ie no port) [62]
o docs: note on failed handles not being counted by curl_multi_perform [70]
o doh: allow only http and https in debug mode [48]
o doh: avoid truncating DNS QTYPE to lower octet [23]
o doh: clean up dangling DOH memory on easy close [9]
o doh: fix (harmless) buffer overrun [13]
o doh: fix undefined behaviour and open up for gcc and clang optimization [12]
o doh: return early if there is no time left [48]
o examples/sslbackend: fix -Wchar-subscripts warning [89]
o examples: remove the "this exact code has not been verified"
o git: add tests/server/disabled to .gitignore [59]
o gnutls: make gnutls_bye() not wait for response on shutdown [104]
o http2: expire a timeout at end of stream [88]
o http2: prevent dup'ed handles to send dummy PRIORITY frames [68]
o http2: relax verification of :authority in push promise requests [8]
o http2_recv: a closed stream trumps pause state [88]
o http: lowercase headernames for HTTP/2 and HTTP/3 [49]
o ldap: Stop using wide char version of ldapp_err2string [1]
o ldap: fix OOM error on missing query string [76]
o mbedtls: add error message for cert validity starting in the future [102]
o mime: when disabled, avoid C99 macro [7]
o ngtcp2: adapt to API change [66]
o ngtcp2: compile with latest ngtcp2 + nghttp3 draft-23 [25]
o ngtcp2: remove fprintf() calls [43]
o openssl: close_notify on the FTP data connection doesn't mean closure [20]
o openssl: fix compiler warning with LibreSSL [34]
o openssl: use strerror on SSL_ERROR_SYSCALL [41]
o os400: getpeername() and getsockname() return ebcdic AF_UNIX sockaddr [47]
o parsedate: fix date parsing disabled builds [18]
o quiche: don't close connection at end of stream
o quiche: persist connection details (fixes -I with --http3) [11]
o quiche: set 'drain' when returning without having drained the queues
o quiche: update HTTP/3 config creation to new API [61]
o redirect: handle redirects to absolute URLs containing spaces [52]
o runtests: get textaware info from curl instead of perl [86]
o schannel: reverse the order of certinfo insertions [96]
o schannel_verify: Fix concurrent openings of CA file [103]
o security: silence conversion warning [83]
o setopt: handle ALTSVC set to NULL
o setopt: make it easier to add new enum values [4]
o setopt: store CURLOPT_RTSP_SERVER_CSEQ correctly [24]
o smb: check for full size message before reading message details [10]
o smbserver: fix Python 3 compatibility [82]
o socks: Fix destination host shown on SOCKS5 error [32]
o test1162: disable MSYS2's POSIX path conversion
o test1591: fix spelling of http feature [97]
o tests: add `connect to non-listen` keywords [91]
o tests: fix narrowing conversion warnings [37]
o tests: fix the test 3001 cert failures [100]
o tests: makes tests succeed when using --disable-proxy [81]
o tests: use %FILE_PWD for file:// URLs [92]
o tests: use port 2 instead of 60000 for a safer non-listening port [72]
o tool_operate: Fix retry sleep time shown to user when Retry-After [79]
o travis: Add an ARM64 build
o url: Curl_free_request_state() should also free doh handles [107]
o url: don't set appconnect time for non-ssl/non-ssh connections [42]
o url: fix the NULL hostname compiler warning [44]
o url: normalize CURLINFO_EFFECTIVE_URL [80]
o url: only reuse TLS connections with matching pinning [5]
o urlapi: avoid index underflow for short ipv6 hostnames [26]
o urlapi: fix URL encoding when setting a full URL [53]
o urlapi: fix unused variable warning [57]
o urlapi: question mark within fragment is still fragment [45]
o urldata: use 'bool' for the bit type on MSVC compilers [30]
o vtls: Fix comment typo about macosx-version-min compiler flag [38]
o vtls: fix narrowing conversion warnings [50]
o winbuild/MakefileBuild.vc: Add vssh [2]
o winbuild/MakefileBuild.vc: Fix line endings
o winbuild: Add manifest to curl.exe for proper OS version detection [51]
o winbuild: add ENABLE_UNICODE option [65]
This release includes the following known bugs:
@ -101,94 +147,136 @@ This release includes the following known bugs:
This release would not have looked like this without help, code, reports and
advice from friends like these:
Alessandro Ghedini, Alexey Melnichuk, Antoni Villalonga, Ben Greear,
bobmitchell1956 on github, Brad King, Brian Carpenter, daboul on github,
Daniel Gustafsson, Daniel Stenberg, Dave Reisner, David Benjamin,
Dheeraj Sangamkar, dtmsecurity on github, Elia Tufarolo, Frank Gevaerts,
Gergely Nagy, Gisle Vanem, Hagai Auro, Han Han, infinnovation-dev on github,
James Knight, Jérémy Rocher, Jeroen Ooms, Jim Fuller, Johannes Schindelin,
Kamil Dudka, Konstantin Kushnir, Marcel Raad, Marc Hörsken, Marcos Diazr,
Michael Kaufmann, NTMan on Github, Patrick Monnerat, Paul Howarth,
Pavel Pavlov, Peter Wu, Ray Satiro, Rod Widdowson, Romain Fliedel,
Samuel Surtees, Sevan Janiyan, Stefan Kanthak, Sven Blumenstein, Tim Rühsen,
Tobias Hintze, Tomas Hoger, tonystz on Github, tpaukrt on github,
Viktor Szakats, Yasuhiro Matsumoto,
(51 contributors)
Alessandro Ghedini, Alex Konev, Alex Samorukov, Andrei Valeriu BICA,
Barry Pollard, Bastien Bouclet, Bernhard Walle, Bylon2 on github,
Christophe Dervieux, Christoph M. Becker, Dagobert Michelsen, Dan Fandrich,
Daniel Silverstone, Daniel Stenberg, Denis Chaplygin, Emil Engler,
Francois Rivard, George Liu, Gilles Vollant, Griffin Downs, Harry Sintonen,
Ilya Kosarev, infinnovation-dev on github, Jacob Barthelmeh, Javier Blazquez,
Jens Finkhaeuser, Jeremy Lainé, Jeroen Ooms, Jimmy Gaussen, Joel Depooter,
Jojojov on github, jzinn on github, Kamil Dudka, Kunal Ekawde, Lucas Pardue,
Lucas Severo, Marcel Hernandez, Marcel Raad, Martin Gartner, Max Dymond,
Michael Kaufmann, Michał Janiszewski, momala454 on github,
Nathaniel J. Smith, Niall O'Reilly, nico-abram on github,
Nikos Mavrogiannopoulos, Patrick Monnerat, Paul B. Omta, Paul Dreik,
Peter Sumatra, Philippe Marguinaud, Piotr Komborski, Ray Satiro,
Richard Alcock, Roland Hieber, Samuel Surtees, Sebastian Haglund,
Spezifant on github, Stian Soiland-Reyes, SumatraPeter on github,
Tatsuhiro Tsujikawa, Tom van der Woerdt, Trivikram Kamat,
Valerii Zapodovnikov, Vilhelm Prytz, Yechiel Kalmenson, Zenju on github,
(68 contributors)
Thanks! (and sorry if I forgot to mention someone)
References to bug reports and discussions on issues:
[1] = https://curl.haxx.se/bug/?i=3194
[2] = https://curl.haxx.se/bug/?i=3197
[3] = https://curl.haxx.se/bug/?i=3190
[4] = https://curl.haxx.se/bug/?i=3201
[5] = https://curl.haxx.se/bug/?i=3204
[6] = https://curl.haxx.se/mail/lib-2018-10/0118.html
[7] = https://curl.haxx.se/bug/?i=3207
[8] = https://curl.haxx.se/bug/?i=3217
[9] = https://curl.haxx.se/bug/?i=3210
[10] = https://curl.haxx.se/bug/?i=3184
[11] = https://curl.haxx.se/bug/?i=3198
[12] = https://curl.haxx.se/bug/?i=3218
[13] = https://curl.haxx.se/bug/?i=3220
[14] = https://curl.haxx.se/bug/?i=3216
[15] = https://curl.haxx.se/bug/?i=3226
[16] = https://curl.haxx.se/bug/?i=3211
[17] = https://curl.haxx.se/bug/?i=3213
[18] = https://curl.haxx.se/bug/?i=3209
[19] = https://curl.haxx.se/bug/?i=3208
[20] = https://curl.haxx.se/bug/?i=3240
[21] = https://curl.haxx.se/bug/?i=3231
[22] = https://curl.haxx.se/bug/?i=3232
[23] = https://curl.haxx.se/bug/?i=3243
[24] = https://curl.haxx.se/bug/?i=3115
[25] = https://curl.haxx.se/bug/?i=3242
[26] = https://curl.haxx.se/bug/?i=3246
[27] = https://curl.haxx.se/bug/?i=3227
[28] = https://curl.haxx.se/bug/?i=876
[29] = https://curl.haxx.se/bug/?i=3251
[30] = https://curl.haxx.se/bug/?i=3225
[31] = https://curl.haxx.se/bug/?i=3263
[32] = https://curl.haxx.se/bug/?i=3261
[33] = https://curl.haxx.se/bug/?i=3262
[34] = https://curl.haxx.se/bug/?i=3281
[35] = https://curl.haxx.se/bug/?i=3278
[36] = https://curl.haxx.se/bug/?i=3275
[37] = https://curl.haxx.se/bug/?i=3270
[38] = https://curl.haxx.se/bug/?i=3254
[39] = https://curl.haxx.se/bug/?i=3276
[40] = https://curl.haxx.se/bug/?i=3291
[41] = https://curl.haxx.se/bug/?i=3269
[42] = https://curl.haxx.se/bug/?i=3238
[43] = https://curl.haxx.se/bug/?i=3266
[44] = https://curl.haxx.se/bug/?i=3202
[45] = https://curl.haxx.se/bug/?i=3295
[46] = https://curl.haxx.se/bug/?i=3022
[47] = https://curl.haxx.se/bug/?i=3296
[48] = https://curl.haxx.se/bug/?i=3299
[49] = https://curl.haxx.se/bug/?i=3305
[50] = https://curl.haxx.se/bug/?i=3311
[51] = https://curl.haxx.se/bug/?i=3322
[52] = https://curl.haxx.se/bug/?i=3323
[53] = https://curl.haxx.se/bug/?i=3316
[54] = https://curl.haxx.se/bug/?i=3317
[55] = https://curl.haxx.se/bug/?i=3193
[56] = https://curl.haxx.se/bug/?i=3342
[57] = https://curl.haxx.se/bug/?i=3325
[58] = https://curl.haxx.se/bug/?i=3331
[59] = https://curl.haxx.se/bug/?i=3339
[60] = https://curl.haxx.se/bug/?i=3261
[61] = https://curl.haxx.se/bug/?i=3337
[62] = https://curl.haxx.se/bug/?i=3303
[63] = https://curl.haxx.se/bug/?i=3348
[64] = https://curl.haxx.se/bug/?i=3351
[65] = https://curl.haxx.se/bug/?i=3349
[66] = https://curl.haxx.se/bug/?i=3337
[67] = https://curl.haxx.se/bug/?i=3345
[68] = https://curl.haxx.se/bug/?i=3346
[69] = https://curl.haxx.se/bug/?i=3353
[70] = https://curl.haxx.se/bug/?i=3359
[71] = https://curl.haxx.se/bug/?i=3362
[72] = https://curl.haxx.se/bug/?i=3361
[73] = https://curl.haxx.se/bug/?i=3340
[1] = https://curl.haxx.se/bug/?i=4272
[2] = https://curl.haxx.se/bug/?i=4322
[3] = https://curl.haxx.se/bug/?i=4328
[4] = https://curl.haxx.se/bug/?i=4321
[5] = https://curl.haxx.se/mail/lib-2019-09/0061.html
[6] = https://curl.haxx.se/bug/?i=4348
[7] = https://curl.haxx.se/bug/?i=4368
[8] = https://curl.haxx.se/bug/?i=4365
[9] = https://curl.haxx.se/bug/?i=4366
[10] = https://curl.haxx.se/bug/?i=4363
[11] = https://curl.haxx.se/bug/?i=4358
[12] = https://curl.haxx.se/bug/?i=4350
[13] = https://curl.haxx.se/bug/?i=4352
[14] = https://curl.haxx.se/bug/?i=4324
[15] = https://curl.haxx.se/bug/?i=4339
[16] = https://curl.haxx.se/bug/?i=4332
[17] = https://curl.haxx.se/bug/?i=4326
[18] = https://curl.haxx.se/bug/?i=4325
[19] = https://curl.haxx.se/bug/?i=4336
[20] = https://curl.haxx.se/bug/?i=4329
[21] = https://curl.haxx.se/bug/?i=4338
[22] = https://curl.haxx.se/bug/?i=4349
[23] = https://curl.haxx.se/bug/?i=4381
[24] = https://curl.haxx.se/bug/?i=4374
[25] = https://curl.haxx.se/bug/?i=4392
[26] = https://curl.haxx.se/bug/?i=4389
[27] = https://curl.haxx.se/bug/?i=4386
[28] = https://curl.haxx.se/bug/?i=4382
[29] = https://curl.haxx.se/bug/?i=4383
[30] = https://curl.haxx.se/bug/?i=4387
[31] = https://curl.haxx.se/bug/?i=4395
[32] = https://curl.haxx.se/bug/?i=4394
[33] = https://curl.haxx.se/bug/?i=4393
[34] = https://curl.haxx.se/bug/?i=4397
[35] = https://curl.haxx.se/bug/?i=4435
[36] = https://curl.haxx.se/bug/?i=4428
[37] = https://curl.haxx.se/bug/?i=4415
[38] = https://curl.haxx.se/bug/?i=4425
[39] = https://curl.haxx.se/bug/?i=4431
[40] = https://curl.haxx.se/bug/?i=4430
[41] = https://curl.haxx.se/bug/?i=4411
[42] = https://curl.haxx.se/bug/?i=3760
[43] = https://curl.haxx.se/bug/?i=4421
[44] = https://curl.haxx.se/bug/?i=4403
[45] = https://curl.haxx.se/bug/?i=4412
[46] = https://curl.haxx.se/bug/?i=4407
[47] = https://curl.haxx.se/bug/?i=4214
[48] = https://curl.haxx.se/bug/?i=4406
[49] = https://curl.haxx.se/bug/?i=4400
[50] = https://curl.haxx.se/bug/?i=4398
[51] = https://curl.haxx.se/bug/?i=4399
[52] = https://curl.haxx.se/bug/?i=4445
[53] = https://curl.haxx.se/bug/?i=4447
[54] = https://curl.haxx.se/bug/?i=4393
[55] = https://curl.haxx.se/bug/?i=4410
[56] = https://curl.haxx.se/bug/?i=4310
[57] = https://curl.haxx.se/bug/?i=4444
[58] = https://curl.haxx.se/bug/?i=4433
[59] = https://curl.haxx.se/bug/?i=4441
[60] = https://curl.haxx.se/bug/?i=4443
[61] = https://curl.haxx.se/bug/?i=4437
[62] = https://curl.haxx.se/bug/?i=4424
[63] = https://curl.haxx.se/bug/?i=4429
[64] = https://curl.haxx.se/bug/?i=4465
[65] = https://curl.haxx.se/bug/?i=4308
[66] = https://curl.haxx.se/bug/?i=4457
[67] = https://curl.haxx.se/bug/?i=4455
[68] = https://curl.haxx.se/bug/?i=4303
[69] = https://curl.haxx.se/bug/?i=4454
[70] = https://curl.haxx.se/bug/?i=4446
[71] = https://curl.haxx.se/bug/?i=4011
[72] = https://curl.haxx.se/bug/?i=4461
[73] = https://curl.haxx.se/bug/?i=4422
[74] = https://curl.haxx.se/bug/?i=4469
[75] = https://curl.haxx.se/bug/?i=4471
[76] = https://curl.haxx.se/bug/?i=4467
[77] = https://curl.haxx.se/bug/?i=4460
[78] = https://curl.haxx.se/bug/?i=4486
[79] = https://curl.haxx.se/bug/?i=4498
[80] = https://curl.haxx.se/bug/?i=4491
[81] = https://curl.haxx.se/bug/?i=4488
[82] = https://curl.haxx.se/bug/?i=4484
[83] = https://curl.haxx.se/bug/?i=4483
[84] = https://curl.haxx.se/bug/?i=4482
[85] = https://curl.haxx.se/bug/?i=4466
[86] = https://curl.haxx.se/bug/?i=4506
[87] = https://curl.haxx.se/bug/?i=4507
[88] = https://curl.haxx.se/bug/?i=4496
[89] = https://curl.haxx.se/bug/?i=4503
[90] = https://curl.haxx.se/bug/?i=4502
[91] = https://curl.haxx.se/bug/?i=4511
[92] = https://curl.haxx.se/bug/?i=4512
[93] = https://curl.haxx.se/bug/?i=4531
[94] = https://curl.haxx.se/bug/?i=4526
[95] = https://curl.haxx.se/bug/?i=4523
[96] = https://curl.haxx.se/bug/?i=4518
[97] = https://curl.haxx.se/bug/?i=4520
[98] = https://curl.haxx.se/bug/?i=4508
[99] = https://curl.haxx.se/bug/?i=4499
[100] = https://curl.haxx.se/bug/?i=4551
[101] = https://curl.haxx.se/bug/?i=4547
[102] = https://curl.haxx.se/bug/?i=4552
[103] = https://curl.haxx.se/mail/lib-2019-10/0104.html
[104] = https://curl.haxx.se/bug/?i=4487
[105] = https://curl.haxx.se/bug/?i=4509
[106] = https://curl.haxx.se/bug/?i=4539
[107] = https://curl.haxx.se/bug/?i=4463
[108] = https://curl.haxx.se/bug/?i=4535
[109] = https://curl.haxx.se/bug/?i=4529
[110] = https://curl.haxx.se/bug/?i=4555


@ -791,7 +791,9 @@ AC_DEFUN([CURL_CHECK_LIBS_LDAP], [
'-lldap -llber' \
'-llber -lldap' \
'-lldapssl -lldapx -lldapsdk' \
'-lldapsdk -lldapx -lldapssl' ; do
'-lldapsdk -lldapx -lldapssl' \
'-lldap -llber -lssl -lcrypto' ; do
if test "$curl_cv_ldap_LIBS" = "unknown"; then
if test -z "$x_nlibs"; then
LIBS="$curl_cv_save_LIBS"
@ -1029,6 +1031,10 @@ AC_DEFUN([CURL_CHECK_FUNC_RECV], [
#endif
#endif
#else
#ifdef HAVE_PROTO_BSDSOCKET_H
#include <proto/bsdsocket.h>
struct Library *SocketBase = NULL;
#endif
#ifdef HAVE_SYS_TYPES_H
#include <sys/types.h>
#endif
@ -1074,6 +1080,10 @@ AC_DEFUN([CURL_CHECK_FUNC_RECV], [
#endif
#define RECVCALLCONV PASCAL
#else
#ifdef HAVE_PROTO_BSDSOCKET_H
#include <proto/bsdsocket.h>
struct Library *SocketBase = NULL;
#endif
#ifdef HAVE_SYS_TYPES_H
#include <sys/types.h>
#endif
@ -1082,11 +1092,10 @@ AC_DEFUN([CURL_CHECK_FUNC_RECV], [
#endif
#define RECVCALLCONV
#endif
#ifndef HAVE_PROTO_BSDSOCKET_H
extern $recv_retv RECVCALLCONV
#ifdef __ANDROID__
__attribute__((overloadable))
#endif
recv($recv_arg1, $recv_arg2, $recv_arg3, $recv_arg4);
#endif
]],[[
$recv_arg1 s=0;
$recv_arg2 buf=0;
@ -1166,6 +1175,10 @@ AC_DEFUN([CURL_CHECK_FUNC_SEND], [
#endif
#endif
#else
#ifdef HAVE_PROTO_BSDSOCKET_H
#include <proto/bsdsocket.h>
struct Library *SocketBase = NULL;
#endif
#ifdef HAVE_SYS_TYPES_H
#include <sys/types.h>
#endif
@ -1211,6 +1224,10 @@ AC_DEFUN([CURL_CHECK_FUNC_SEND], [
#endif
#define SENDCALLCONV PASCAL
#else
#ifdef HAVE_PROTO_BSDSOCKET_H
#include <proto/bsdsocket.h>
struct Library *SocketBase = NULL;
#endif
#ifdef HAVE_SYS_TYPES_H
#include <sys/types.h>
#endif
@ -1219,11 +1236,10 @@ AC_DEFUN([CURL_CHECK_FUNC_SEND], [
#endif
#define SENDCALLCONV
#endif
#ifndef HAVE_PROTO_BSDSOCKET_H
extern $send_retv SENDCALLCONV
#ifdef __ANDROID__
__attribute__((overloadable))
#endif
send($send_arg1, $send_arg2, $send_arg3, $send_arg4);
#endif
]],[[
$send_arg1 s=0;
$send_arg3 len=0;
@ -1325,6 +1341,10 @@ AC_DEFUN([CURL_CHECK_MSG_NOSIGNAL], [
#endif
#endif
#else
#ifdef HAVE_PROTO_BSDSOCKET_H
#include <proto/bsdsocket.h>
struct Library *SocketBase = NULL;
#endif
#ifdef HAVE_SYS_TYPES_H
#include <sys/types.h>
#endif
@ -1718,6 +1738,7 @@ dnl using current libraries or if another one is required.
AC_DEFUN([CURL_CHECK_LIBS_CONNECT], [
AC_REQUIRE([CURL_INCLUDES_WINSOCK2])dnl
AC_REQUIRE([CURL_INCLUDES_BSDSOCKET])dnl
AC_MSG_CHECKING([for connect in libraries])
tst_connect_save_LIBS="$LIBS"
tst_connect_need_LIBS="unknown"
@ -1727,7 +1748,8 @@ AC_DEFUN([CURL_CHECK_LIBS_CONNECT], [
AC_LINK_IFELSE([
AC_LANG_PROGRAM([[
$curl_includes_winsock2
#ifndef HAVE_WINDOWS_H
$curl_includes_bsdsocket
#if !defined(HAVE_WINDOWS_H) && !defined(HAVE_PROTO_BSDSOCKET_H)
int connect(int, void*, int);
#endif
]],[[
@ -1858,6 +1880,11 @@ AC_DEFUN([CURL_CHECK_FUNC_SELECT], [
#endif
#endif
#ifndef HAVE_WINDOWS_H
#ifdef HAVE_PROTO_BSDSOCKET_H
#include <proto/bsdsocket.h>
struct Library *SocketBase = NULL;
#define select(a,b,c,d,e) WaitSelect(a,b,c,d,e,0)
#endif
#ifdef HAVE_SYS_SELECT_H
#include <sys/select.h>
#endif
@ -1916,6 +1943,11 @@ AC_DEFUN([CURL_CHECK_FUNC_SELECT], [
#endif
#endif
#ifndef HAVE_WINDOWS_H
#ifdef HAVE_PROTO_BSDSOCKET_H
#include <proto/bsdsocket.h>
struct Library *SocketBase = NULL;
#define select(a,b,c,d,e) WaitSelect(a,b,c,d,e,0)
#endif
#ifdef HAVE_SYS_SELECT_H
#include <sys/select.h>
#endif
@ -1930,15 +1962,14 @@ AC_DEFUN([CURL_CHECK_FUNC_SELECT], [
long tv_usec;
};
#endif
#ifndef HAVE_PROTO_BSDSOCKET_H
extern $sel_retv SELECTCALLCONV
#ifdef __ANDROID__
__attribute__((overloadable))
#endif
select($sel_arg1,
select($sel_arg1,
$sel_arg234,
$sel_arg234,
$sel_arg234,
$sel_arg5);
#endif
]],[[
$sel_arg1 nfds=0;
$sel_arg234 rfds=0;

curl/aclocal.m4 (vendored): 1 changed line

@ -1169,7 +1169,6 @@ AC_SUBST([am__tar])
AC_SUBST([am__untar])
]) # _AM_PROG_TAR
m4_include([m4/ax_code_coverage.m4])
m4_include([m4/ax_compile_check_sizeof.m4])
m4_include([m4/curl-compilers.m4])
m4_include([m4/curl-confopts.m4])


@ -6,7 +6,7 @@ rem * / __| | | | |_) | |
rem * | (__| |_| | _ <| |___
rem * \___|\___/|_| \_\_____|
rem *
rem * Copyright (C) 1998 - 2017, Daniel Stenberg, <daniel@haxx.se>, et al.
rem * Copyright (C) 1998 - 2019, Daniel Stenberg, <daniel@haxx.se>, et al.
rem *
rem * This software is licensed as described in the file COPYING, which
rem * you should have received as part of this distribution. The terms
@ -212,7 +212,7 @@ rem
copy /Y src\tool_hugehelp.c.cvs src\tool_hugehelp.c 1>NUL 2>&1
) else (
echo #include "tool_setup.h"> src\tool_hugehelp.c
echo #include "tool_hugehelp.hd">> src\tool_hugehelp.c
echo #include "tool_hugehelp.h">> src\tool_hugehelp.c
echo.>> src\tool_hugehelp.c
echo void hugehelp(void^)>> src\tool_hugehelp.c
echo {>> src\tool_hugehelp.c

curl/configure (vendored): 3310 changed lines (diff suppressed because it is too large)

Another large file diff was also suppressed.

curl/docs/ALTSVC.md (new file): 39 lines

@ -0,0 +1,39 @@
# Alt-Svc
curl features **EXPERIMENTAL** support for the Alt-Svc: HTTP header.
## Enable Alt-Svc in build
`./configure --enable-alt-svc`
## Standard
[RFC 7838](https://tools.ietf.org/html/rfc7838)
# Alt-Svc cache file format
This is a text based file with one line per entry and each line consists of nine
space separated fields.
## Example
h2 quic.tech 8443 h3-22 quic.tech 8443 "20190808 06:18:37" 0 0
## Fields
1. The ALPN id for the source origin
2. The host name for the source origin
3. The port number for the source origin
4. The ALPN id for the destination host
5. The host name for the destination host
6. The port number for the destination host
7. The expiration date and time of this entry within double quotes. The date format is "YYYYMMDD HH:MM:SS" and the time zone is GMT.
8. Boolean (1 or 0) if "persist" was set for this entry
9. Integer priority value (not currently used)
# TODO
- handle multiple response headers, when one of them says `clear` (should
override them all)
- using `Age:` value for caching age as per spec
- `CURLALTSVC_IMMEDIATELY` support
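
Editor's aside (not part of ALTSVC.md): a rough sketch of how one line of this nine-field cache format could be parsed; the struct and function names are invented for the example and error handling is deliberately minimal.

```c
#include <stdio.h>

/* hypothetical holder for one parsed Alt-Svc cache entry */
struct altsvc_entry {
  char src_alpn[16];
  char src_host[256];
  unsigned int src_port;
  char dst_alpn[16];
  char dst_host[256];
  unsigned int dst_port;
  char expires[32];      /* "YYYYMMDD HH:MM:SS", GMT */
  unsigned int persist;  /* 1 if "persist" was set */
  unsigned int priority; /* not currently used */
};

/* parse one cache line; returns 1 on success, 0 on failure */
static int parse_altsvc_line(const char *line, struct altsvc_entry *e)
{
  int n = sscanf(line, "%15s %255s %u %15s %255s %u \"%31[^\"]\" %u %u",
                 e->src_alpn, e->src_host, &e->src_port,
                 e->dst_alpn, e->dst_host, &e->dst_port,
                 e->expires, &e->persist, &e->priority);
  return n == 9;
}

int main(void)
{
  struct altsvc_entry e;
  const char *line =
    "h2 quic.tech 8443 h3-22 quic.tech 8443 \"20190808 06:18:37\" 0 0";
  if(parse_altsvc_line(line, &e))
    printf("%s:%u -> %s on %s:%u (expires %s)\n", e.src_host, e.src_port,
           e.dst_alpn, e.dst_host, e.dst_port, e.expires);
  return 0;
}
```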


@ -23,6 +23,8 @@ Requests](https://github.com/whoshuu/cpr) by Huu Nguyen
Cocoa: [BBHTTP](https://github.com/brunodecarvalho/BBHTTP) written by Bruno de Carvalho
[curlhandle](https://github.com/karelia/curlhandle) Written by Dan Wood
Clojure: [clj-curl](https://github.com/lsevero/clj-curl) by Lucas Severo
[D](https://dlang.org/library/std/net/curl.html) Written by Kenneth Bogert
[Delphi](https://github.com/Mercury13/curl4delphi) Written by Mikhail Merkuryev
@ -53,6 +55,8 @@ Go: [go-curl](https://github.com/andelf/go-curl) by ShuYu Wang
[Julia](https://github.com/forio/Curl.jl) Written by Paul Howe
[Kapito](https://github.com/puzza007/katipo) is an Erlang HTTP library around libcurl.
[Lisp](https://common-lisp.net/project/cl-curl/) Written by Liam Healy
Lua: [luacurl](http://luacurl.luaforge.net/) by Alexander Marinov, [Lua-cURL](https://github.com/Lua-cURL) by Jürgen Hötzel
@ -61,6 +65,8 @@ Lua: [luacurl](http://luacurl.luaforge.net/) by Alexander Marinov, [Lua-cURL](ht
[.NET](https://sourceforge.net/projects/libcurl-net/) libcurl-net by Jeffrey Phillips
[Nim](https://nimble.directory/pkg/libcurl) wrapper for libcurl
[node.js](https://github.com/JCMais/node-libcurl) node-libcurl by Jonathan Cardoso Machado
[Object-Pascal](https://web.archive.org/web/20020610214926/www.tekool.com/opcurl) Free Pascal, Delphi and Kylix binding written by Christophe Espern.
@ -69,14 +75,17 @@ Lua: [luacurl](http://luacurl.luaforge.net/) by Alexander Marinov, [Lua-cURL](ht
[Pascal](https://web.archive.org/web/20030804091414/houston.quik.com/jkp/curlpas/) Free Pascal, Delphi and Kylix binding written by Jeffrey Pohlmeyer.
Perl: [WWW--Curl](https://github.com/szbalint/WWW--Curl) Maintained by Cris
Perl: [WWW::Curl](https://github.com/szbalint/WWW--Curl) Maintained by Cris
Bailiff and Bálint Szilakszi,
[perl6-net-curl](https://github.com/azawawi/perl6-net-curl) by Ahmad M. Zawawi
[NET::Curl](https://metacpan.org/pod/Net::Curl) by Przemyslaw Iskra
[PHP](https://php.net/curl) Originally written by Sterling Hughes
[PostgreSQL](https://github.com/pramsey/pgsql-http) - HTTP client for PostgreSQL
[PureBasic](https://www.purebasic.com/documentation/http/index.html) uses libcurl in its "native" HTTP subsystem
[Python](http://pycurl.io/) PycURL by Kjetil Jacobsen
[R](https://cran.r-project.org/package=curl)

curl/docs/BUG-BOUNTY.md (new file): 106 lines

@ -0,0 +1,106 @@
# The curl bug bounty
The curl project runs a bug bounty program in association with
[HackerOne](https://www.hackerone.com) and the [Internet Bug
Bounty](https://internetbugbounty.org).
# How does it work?
Start out by posting your suspected security vulnerability directly to [curl's
HackerOne program](https://hackerone.com/curl).
After you have reported a security issue, it has been deemed credible, and a
patch and advisory has been made public, you may be eligible for a bounty from
this program.
See all details at [https://hackerone.com/curl](https://hackerone.com/curl)
This bounty is relying on funds from sponsors. If you use curl professionally,
consider help funding this! See
[https://opencollective.com/curl](https://opencollective.com/curl) for
details.
# What are the reward amounts?
The curl project offers monetary compensation for reported and published
security vulnerabilities. The amount of money that is rewarded depends on how
serious the flaw is determined to be.
We offer reward money *up to* a certain amount per severity. The curl security
team determines the severity of each reported flaw on a case by case basis and
the exact amount rewarded to the reporter is then decided.
Check out the current award amounts at [https://hackerone.com/curl](https://hackerone.com/curl)
# Who is eligible for a reward?
Everyone and anyone who reports a security problem in a released curl version
that hasn't already been reported can ask for a bounty.
Vulnerabilities in features that are off by default and documented as
experimental are not eligible for a reward.
The vulnerability has to be fixed and publicly announced (by the curl project)
before a bug bounty will be considered.
Bounties need to be requested within twelve months from the publication of the
vulnerability.
The vulnerabilities must not have been made public before February 1st, 2019.
We do not retroactively pay for old, already known, or published security
problems.
# Product vulnerabilities only
This bug bounty only concerns the curl and libcurl products and thus their
respective source codes - when running on existing hardware. It does not
include documentation, websites, or other infrastructure.
The curl security team will be the sole arbiter of whether a reported flaw
is subject to a bounty or not.
# How are vulnerabilities graded?
The grading of each reported vulnerability that makes a reward claim will be
performed by the curl security team. The grading will be based on the CVSS
(Common Vulnerability Scoring System) 3.0.
# How are reward amounts determined?
The curl security team first gives the vulnerability a score, as mentioned
above, and based on that level we set an amount depending on the specifics of
the individual case. Other sponsors of the program might also get involved and
can raise the amounts depending on the particular issue.
# What happens if the bounty fund is drained?
The bounty fund depends on sponsors. If we pay out more bounties than we add,
the fund will eventually drain. If that ends up happening, we will simply not
be able to pay out bounties as high as we would like, and we hope that we can
convince new sponsors to help us top up the fund again.
# Regarding taxes, etc. on the bounties
In the event that the individual receiving a curl bug bounty needs to pay
taxes on the reward money, the responsibility lies with the receiver. The
curl project or its security team never actually receive any of this money,
hold the money, or pay out the money.
## Bonus levels
In cooperation with [Dropbox](https://www.dropbox.com) the curl bug bounty can
offer the highest levels of rewards if the issue covers one of their areas of
interest - and only if the bug is graded *high* or *critical*. A
non-exhaustive list of vulnerabilities Dropbox is interested in includes:
- RCE
- URL parsing vulnerabilities with demonstrable security impact
Dropbox would generally hand out rewards for critical vulnerabilities ranging
from 12k-32k USD where RCE is on the upper end of the spectrum.
URL parsing vulnerabilities with demonstrable security impact might include
incorrectly determining the authority of a URL when a special character is
inserted into the path of the URL (as a hypothetical). This type of
vulnerability would likely yield 6k-12k unless further impact could be
demonstrated.

View file

@ -61,9 +61,14 @@ BUGS
using our security development process.
Security related bugs or bugs that are suspected to have a security impact,
should be reported by email to curl-security@haxx.se so that they first can
be dealt with away from the public to minimize the harm and impact it will
have on existing users out there who might be using the vulnerable versions.
should be reported on the curl security tracker at HackerOne:
https://hackerone.com/curl
This ensures that the report reaches the curl security team so that they
can first deal with the report away from the public to minimize the harm
and impact it will have on existing users out there who might be using the
vulnerable versions.
The curl project's process for handling security related issues is
documented here:

View file

@ -6,11 +6,12 @@ and
[`--ciphers`](https://curl.haxx.se/docs/manpage.html#--ciphers)
users can control which ciphers to consider when negotiating TLS connections.
TLS 1.3 ciphers are supported since curl 7.61 with options
TLS 1.3 ciphers are supported since curl 7.61 for OpenSSL 1.1.1+ with options
[`CURLOPT_TLS13_CIPHERS`](https://curl.haxx.se/libcurl/c/CURLOPT_TLS13_CIPHERS.html)
and
[`--tls13-ciphers`](https://curl.haxx.se/docs/manpage.html#--tls13-ciphers)
.
. If you are using a different SSL backend you can try setting TLS 1.3 cipher
suites by using the respective regular cipher option.
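
As a minimal, hedged sketch (assuming a libcurl built against OpenSSL 1.1.1+
as described above; the URL and the chosen suites are arbitrary examples), an
application could select TLS 1.3 cipher suites like this:

    #include <stdio.h>
    #include <curl/curl.h>

    int main(void)
    {
      CURL *curl = curl_easy_init();
      if(curl) {
        CURLcode res;
        curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/");
        /* colon-separated TLS 1.3 suite names for the OpenSSL backend */
        curl_easy_setopt(curl, CURLOPT_TLS13_CIPHERS,
                         "TLS_AES_256_GCM_SHA384:TLS_CHACHA20_POLY1305_SHA256");
        res = curl_easy_perform(curl);
        if(res != CURLE_OK)
          fprintf(stderr, "transfer failed: %s\n", curl_easy_strerror(res));
        curl_easy_cleanup(curl);
      }
      return 0;
    }

The command line equivalent would pass the same string to `--tls13-ciphers`.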
The names of the known ciphers differ depending on which TLS backend
libcurl was built to use. This is an attempt to list known cipher names.
@ -269,9 +270,16 @@ When specifying multiple cipher names, separate them with colon (`:`).
`ecdhe_ecdsa_chacha20_poly1305_sha_256`
`dhe_rsa_chacha20_poly1305_sha_256`
### TLS 1.3 cipher suites
`aes_128_gcm_sha_256`
`aes_256_gcm_sha_384`
`chacha20_poly1305_sha_256`
## GSKit
Ciphers are internally defined as numeric codes (https://www.ibm.com/support/knowledgecenter/ssw_ibm_i_73/apis/gsk_attribute_set_buffer.htm),
Ciphers are internally defined as
[numeric codes](https://www.ibm.com/support/knowledgecenter/ssw_ibm_i_73/apis/gsk_attribute_set_buffer.htm),
but libcurl maps them to the following case-insensitive names.
### SSL2 cipher suites (insecure: disabled by default)
@ -446,9 +454,18 @@ but libcurl maps them to the following case-insensitive names.
`DHE-PSK-CHACHA20-POLY1305`,
`EDH-RSA-DES-CBC3-SHA`,
## WinSSL
## Schannel
WinSSL allows the enabling and disabling of encryption algorithms, but not specific ciphersuites. They are defined by Microsoft (https://msdn.microsoft.com/en-us/library/windows/desktop/aa375549(v=vs.85).aspx)
Schannel allows the enabling and disabling of encryption algorithms, but not
specific ciphersuites. They are
[defined](https://docs.microsoft.com/windows/desktop/SecCrypto/alg-id) by
Microsoft.
There is also the case that the selected algorithm is not supported by the
protocol or does not match the ciphers offered by the server during the SSL
negotiation. In this case curl will return error
`CURLE_SSL_CONNECT_ERROR (35) SEC_E_ALGORITHM_MISMATCH`
and the request will fail.
`CALG_MD2`,
`CALG_MD4`,
@ -496,3 +513,4 @@ WinSSL allows the enabling and disabling of encryption algorithms, but not speci
`CALG_ECDH`,
`CALG_ECMQV`,
`CALG_ECDSA`,
`CALG_ECDH_EPHEM`,
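
As a hedged sketch only (not an example taken from this document): with the
Schannel backend an application could restrict the allowed algorithms through
`CURLOPT_SSL_CIPHER_LIST`, using `CALG_*` names such as the ones listed above.
`CALG_AES_256` and `CALG_SHA_256` are assumed here to be among the supported
entries:

    #include <stdio.h>
    #include <curl/curl.h>

    int main(void)
    {
      CURL *curl = curl_easy_init();
      if(curl) {
        CURLcode res;
        curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/");
        /* colon-separated CALG_* algorithm names for Schannel */
        curl_easy_setopt(curl, CURLOPT_SSL_CIPHER_LIST,
                         "CALG_AES_256:CALG_SHA_256:CALG_ECDH_EPHEM");
        /* if the server cannot match these, curl returns
           CURLE_SSL_CONNECT_ERROR as described above */
        res = curl_easy_perform(curl);
        if(res != CURLE_OK)
          fprintf(stderr, "transfer failed: %s\n", curl_easy_strerror(res));
        curl_easy_cleanup(curl);
      }
      return 0;
    }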

View file

@ -9,8 +9,8 @@ style is more important than individual contributors having their own personal
tastes satisfied.
Our C code has a few style rules. Most of them are verified and upheld by the
"lib/checksrc.pl" script. Invoked with "make checksrc" or even by default by
the build system when built after "./configure --enable-debug" has been used.
`lib/checksrc.pl` script. Invoked with `make checksrc` or even by default by
the build system when built after `./configure --enable-debug` has been used.
It is normally not a problem for anyone to follow the guidelines, as you just
need to copy the style already used in the source code and there are no
@ -227,7 +227,7 @@ Align with the "current open" parenthesis:
Use **#ifdef HAVE_FEATURE** to do conditional code. We avoid checking for
particular operating systems or hardware in the #ifdef lines. The HAVE_FEATURE
shall be generated by the configure script for unix-like systems and they are
hard-coded in the config-[system].h files for the others.
hard-coded in the `config-[system].h` files for the others.
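
A small hedged sketch of the idea; `HAVE_FANCY_FEATURE` is a hypothetical
symbol standing in for whatever configure (or `config-[system].h`) would
define:

    #include <stdio.h>

    /* hypothetical feature symbol, set by configure or config-[system].h */
    #ifdef HAVE_FANCY_FEATURE
    static const char *task_backend(void) { return "fancy feature"; }
    #else
    static const char *task_backend(void) { return "portable fallback"; }
    #endif

    int main(void)
    {
      /* the calling code checks for the feature, never for an OS */
      printf("using: %s\n", task_backend());
      return 0;
    }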
We also encourage use of macros/functions that possibly are empty or defined
to constants when libcurl is built without that feature, to make the code

View file

@ -20,8 +20,8 @@ Before posting to one of the curl mailing lists, please read up on the
We also hang out on IRC in #curl on irc.freenode.net
If you're at all interested in the code side of things, consider clicking
'watch' on the [curl repo on github](https://github.com/curl/curl) to get
notified on pull requests and new issues posted there.
'watch' on the [curl repo on github](https://github.com/curl/curl) to be
notified of pull requests and new issues posted there.
### License and copyright
@ -155,7 +155,7 @@ to loose in the flood of many emails, like they sometimes do on the mailing
lists.
Every pull request submitted will automatically be tested in several different
ways. Every pull request is verfied that:
ways. Every pull request is verified for each of the following:
- ... it still builds, warning-free, on Linux and macOS, with both
clang and gcc

View file

@ -5,62 +5,32 @@ email the curl-library mailing list as soon as possible and explain to us why
this is a problem for you and how your use case can't be satisfied properly
using a work around.
## HTTP pipelining
## PolarSSL
HTTP pipelining is badly supported by curl in the sense that we have bugs and
it is a fragile feature without enough tests. Also, when something turns out
to have problems it is really tricky to debug due to the timing sensitivity so
very often enabling debug outputs or similar completely changes the nature of
the behavior and things are not reproducing anymore!
The polarssl TLS library has not had an update in over three years. The last
release was done on [January 7
2016](https://tls.mbed.org/tech-updates/releases). This library has been
superseded by the mbedTLS library, which is the current incarnation of
PolarSSL. curl has supported mbedTLS since 2015.
HTTP pipelining was never enabled by default by the large desktop browsers due
to all the issues with it. Both Firefox and Chrome have also dropped
pipelining support entirely since a long time back now. We are in fact over
time becoming more and more lonely in supporting pipelining.
It seems unlikely that this library is a good choice for users to get proper
TLS security and support today and at the same time there are plenty of good
and updated alternatives.
The bad state of HTTP pipelining was a primary driving factor behind HTTP/2
and its multiplexing feature. HTTP/2 multiplexing is truly and really
"pipelining done right". It is way more solid, practical and solves the use
case in a better way with better performance and fewer downsides and problems.
In 2018, pipelining *should* be abandoned and HTTP/2 should be used instead.
I consider it likely that the existing users of curl + polarssl out there are
stuck on old curl versions and when they eventually manage to update curl they
should also be able to update their TLS library.
### State
In 7.62.0, we will add code that ignores the "enable pipeline" option
setting. The *setopt() function would still return "OK" though, so the
application couldn't tell that this is happening.
Users who truly need pipelining from that version will need to modify the code
(ever so slightly) and rebuild.
In the curl 7.65.2 release (July 17, 2019) the ability to build with this TLS
backend is removed from the configure script. The code remains and can be
built and used going forward, but it has to be manually enabled in a build (or
the configure removal reverted).
### Removal
Six months later, in sync with the release planned for April 2019 (might be
7.66.0), and assuming no major riots have occurred due to this in the
meantime, we rip out the pipelining code. It is in the order of 1000 lines of
libcurl code.
Left to answer: should the *setopt() function start to return error when these
options are set to be able to tell when they're trying to use options that are
no longer around or should we maintain behavior as much as possible?
## `CURLOPT_DNS_USE_GLOBAL_CACHE`
This option makes libcurl use a global non-thread-safe cache for DNS if
enabled. The option has been marked as "obsolete" in the header file and in
documentation for several years already.
There's a proper and safe alternative that has been provided for many years:
the share API.
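
A hedged sketch of that alternative, sharing the DNS cache between easy
handles through the share interface (URL chosen arbitrarily):

    #include <curl/curl.h>

    int main(void)
    {
      CURLSH *share = curl_share_init();
      CURL *curl = curl_easy_init();
      if(share && curl) {
        /* share the DNS cache instead of relying on the old global cache;
           a threaded program would also set CURLSHOPT_LOCKFUNC/UNLOCKFUNC */
        curl_share_setopt(share, CURLSHOPT_SHARE, CURL_LOCK_DATA_DNS);
        curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/");
        curl_easy_setopt(curl, CURLOPT_SHARE, share);
        curl_easy_perform(curl);
      }
      curl_easy_cleanup(curl);
      curl_share_cleanup(share);
      return 0;
    }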
### State
In curl 7.62.0 setting this option to TRUE will not have any effect. The
global cache will not be enabled. The code still remains so it is easy to
revert if need be.
### Removal
Remove all global-cache related code from curl around April 2019 (might be
7.66.0).
The support for PolarSSL and all code for it will be completely removed from
the curl code base six months after it ships disabled in configure in a
release: in the release on or near February 27, 2020 (possibly called curl
7.70.0).

139
curl/docs/ESNI.md Normal file
View file

@ -0,0 +1,139 @@
# TLS: ESNI support in curl and libcurl
## Summary
**ESNI** means **Encrypted Server Name Indication**, a TLS 1.3
extension which is currently the subject of an
[IETF Draft][tlsesni].
This file is intended to show the latest current state of ESNI support
in **curl** and **libcurl**.
At the end of August 2019, an [experimental fork of curl][niallorcurl],
built using an [experimental fork of OpenSSL][sftcdopenssl], which in
turn provided an implementation of ESNI, was demonstrated
interoperating with a server belonging to the [DEfO
Project][defoproj].
Further sections here describe
- resources needed for building and demonstrating **curl** support
for ESNI,
- progress to date,
- TODO items, and
- additional details of specific stages of the progress.
## Resources needed
To build and demonstrate ESNI support in **curl** and/or **libcurl**,
you will need
- a TLS library, supported by **libcurl**, which implements ESNI;
- an edition of **curl** and/or **libcurl** which supports the ESNI
implementation of the chosen TLS library;
- an environment for building and running **curl**, and at least
building **OpenSSL**;
- a server, supporting ESNI, against which to run a demonstration
and perhaps a specific target URL;
- some instructions.
The following set of resources is currently known to be available.
| Set | Component | Location | Remarks |
|:-----|:-------------|:------------------------------|:-------------------------------------------|
| DEfO | TLS library | [sftcd/openssl][sftcdopenssl] | Tag *esni-2019-08-30* avoids bleeding edge |
| | curl fork | [niallor/curl][niallorcurl] | Tag *esni-2019-08-30* likewise |
| | instructions | [ESNI-README][niallorreadme] | |
## Progress
### PR 4011 (Jun 2019) expected in curl release 7.67.0 (Oct 2019)
- Details [below](#pr4011);
- New **curl** feature: `CURL_VERSION_ESNI`;
- New configuration option: `--enable-esni`;
- Build-time check for availability of resources needed for ESNI
support;
- Pre-processor symbol `USE_ESNI` for conditional compilation of
ESNI support code, subject to configuration option and
availability of needed resources.
## TODO
- (next PR) Add libcurl options to set ESNI parameters.
- (next PR) Add curl tool command line options to set ESNI parameters.
- (WIP) Extend DoH functions so that published ESNI parameters can be
retrieved from DNS instead of being required as options.
- (WIP) Work with OpenSSL community to finalize ESNI API.
- Track OpenSSL ESNI API in libcurl
- Identify and implement any changes needed for CMake.
- Optimize build-time checking of available resources.
- Encourage ESNI support work on other TLS/SSL backends.
## Additional detail
### PR 4011
**TLS: Provide ESNI support framework for curl and libcurl**
The proposed change provides a framework to facilitate work to
implement ESNI support in curl and libcurl. It is not intended
either to provide ESNI functionality or to favour any particular
TLS-providing backend. Specifically, the change reserves a
feature bit for ESNI support (symbol `CURL_VERSION_ESNI`),
implements setting and reporting of this bit, includes dummy
book-keeping for the symbol, adds a build-time configuration
option (`--enable-esni`), provides an extensible check for
resources available to provide ESNI support, and defines a
compiler pre-processor symbol (`USE_ESNI`) accordingly.
Proposed-by: @niallor (Niall O'Reilly)\
Encouraged-by: @sftcd (Stephen Farrell)\
See-also: [this message](https://curl.haxx.se/mail/lib-2019-05/0108.html)
Limitations:
- Book-keeping (symbols-in-versions) needs real release number, not 'DUMMY'.
- Framework is incomplete, as it covers autoconf, but not CMake.
- Check for available resources, although extensible, refers only to
specific work in progress ([described
here](https://github.com/sftcd/openssl/tree/master/esnistuff)) to
implement ESNI for OpenSSL, as this is the immediate motivation
for the proposed change.
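
As a hedged sketch of how an application might test for the reserved feature
bit once a libcurl build defines it (guarded, since `CURL_VERSION_ESNI` may
not be present in older headers):

    #include <stdio.h>
    #include <curl/curl.h>

    int main(void)
    {
      curl_version_info_data *info = curl_version_info(CURLVERSION_NOW);
    #ifdef CURL_VERSION_ESNI
      if(info->features & CURL_VERSION_ESNI)
        printf("this libcurl was built with ESNI support\n");
      else
        printf("no ESNI support in this libcurl\n");
    #else
      (void)info;
      printf("these libcurl headers predate the ESNI feature bit\n");
    #endif
      return 0;
    }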
## References
CloudFlare blog: [Encrypting SNI: Fixing One of the Core Internet Bugs][corebug]
Cloudflare blog: [Encrypt it or lose it: how encrypted SNI works][esniworks]
IETF Draft: [Encrypted Server Name Indication for TLS 1.3][tlsesni]
---
[tlsesni]: https://datatracker.ietf.org/doc/draft-ietf-tls-esni/
[esniworks]: https://blog.cloudflare.com/encrypted-sni/
[corebug]: https://blog.cloudflare.com/esni/
[defoproj]: https://defo.ie/
[sftcdopenssl]: https://github.com/sftcd/openssl/
[niallorcurl]: https://github.com/niallor/curl/
[niallorreadme]: https://github.com/niallor/curl/blob/master/ESNI-README.md

22
curl/docs/EXPERIMENTAL.md Normal file
View file

@ -0,0 +1,22 @@
# Experimental
Some features and functionality in curl and libcurl are considered
**EXPERIMENTAL**.
Experimental support in curl means:
1. Experimental features are provided to allow users to try them out and
provide feedback on functionality and API etc before they ship and get
"carved in stone".
2. You must enable the feature when invoking configure as otherwise curl will
not be built with the feature present.
3. We strongly advise against using this feature in production.
4. **We reserve the right to change behavior** of the feature without sticking
to our API/ABI rules as we do for regular features, as long as it is marked
experimental.
5. Experimental features are clearly marked so in documentation. Beware.
## Experimental features right now
- HTTP/3 support and options
- alt-svc support and options

View file

@ -43,8 +43,8 @@ FAQ
3.9 How do I use curl in my favorite programming language?
3.10 What about SOAP, WebDAV, XML-RPC or similar protocols over HTTP?
3.11 How do I POST with a different Content-Type?
3.12 Why do FTP specific features over HTTP proxy fail?
3.13 Why does my single/double quotes fail?
3.12 Why do FTP-specific features over HTTP proxy fail?
3.13 Why do my single/double quotes fail?
3.14 Does curl support Javascript or PAC (automated proxy config)?
3.15 Can I do recursive fetches with curl?
3.16 What certificates do I need when I use SSL?
@ -72,7 +72,7 @@ FAQ
4.8 I found a bug!
4.9 Curl can't authenticate to the server that requires NTLM?
4.10 My HTTP request using HEAD, PUT or DELETE doesn't work!
4.11 Why does my HTTP range requests return the full document?
4.11 Why do my HTTP range requests return the full document?
4.12 Why do I get "certificate verify failed" ?
4.13 Why is curl -R on Windows one hour off?
4.14 Redirects work in browser but not with curl!
@ -253,11 +253,10 @@ FAQ
any way by the project.
We still get help from companies. Haxx provides web site, bandwidth, mailing
lists etc, sourceforge.net hosts project services we take advantage from,
like the bug tracker, and GitHub hosts the primary git repository at
https://github.com/curl/curl. Also again, some companies have sponsored
certain parts of the development in the past and I hope some will continue to
do so in the future.
lists etc, GitHub hosts the primary git repository and other services like
the bug tracker at https://github.com/curl/curl. Also again, some companies
have sponsored certain parts of the development in the past and I hope some
will continue to do so in the future.
If you want to support our project, consider a donation or a banner-program
or even better: by helping us with coding, documenting or testing etc.
@ -447,10 +446,10 @@ FAQ
backends.
curl can be built to use one of the following SSL alternatives: OpenSSL,
GnuTLS, yassl, NSS, PolarSSL, MesaLink, Secure Transport (native iOS/OS X),
WinSSL (native Windows) or GSKit (native IBM i). They all have their pros
and cons, and we try to maintain a comparison of them here:
https://curl.haxx.se/docs/ssl-compared.html
libressl, BoringSSL, GnuTLS, wolfSSL, NSS, mbedTLS, MesaLink, Secure
Transport (native iOS/OS X), Schannel (native Windows) or GSKit (native IBM
i). They all have their pros and cons, and we try to maintain a comparison
of them here: https://curl.haxx.se/docs/ssl-compared.html
2.3 Where can I find a copy of LIBEAY32.DLL?
@ -484,7 +483,7 @@ FAQ
and logs and check out why the configure script doesn't find the SSL libs
and/or include files.
Also, check out the other paragraph in this FAQ labelled "configure doesn't
Also, check out the other paragraph in this FAQ labeled "configure doesn't
find OpenSSL even when it is installed".
3.2 How do I tell curl to resume a transfer?
@ -558,10 +557,9 @@ FAQ
3.9 How do I use curl in my favorite programming language?
There exist many language interfaces/bindings for curl that integrates it
better with various languages. If you are fluid in a script language, you
may very well opt to use such an interface instead of using the command line
tool.
Many programming languages have interfaces/bindings that allow you to use
curl without having to use the command line tool. If you are fluent in such
a language, you may prefer to use one of these interfaces instead.
Find out more about which languages support curl directly, and how to
install and use them, in the libcurl section of the curl web site:
@ -573,13 +571,14 @@ FAQ
about bindings on the curl-library list too, but be prepared that people on
that list may not know anything about bindings.
In October 2009, there were interfaces available for the following
languages: Ada95, Basic, C, C++, Ch, Cocoa, D, Dylan, Eiffel, Euphoria,
Ferite, Gambas, glib/GTK+, Haskell, ILE/RPG, Java, Lisp, Lua, Mono, .NET,
Object-Pascal, OCaml, Pascal, Perl, PHP, PostgreSQL, Python, R, Rexx, Ruby,
Scheme, S-Lang, Smalltalk, SP-Forth, SPL, Tcl, Visual Basic, Visual FoxPro,
Q, wxwidgets and XBLite. By the time you read this, additional ones may have
appeared!
In February 2019, there were interfaces available for the following
languages: Ada95, Basic, C, C++, Ch, Cocoa, D, Delphi, Dylan, Eiffel,
Euphoria, Falcon, Ferite, Gambas, glib/GTK+, Go, Guile, Harbour, Haskell,
Java, Julia, Lisp, Lua, Mono, .NET, node.js, Object-Pascal, OCaml, Pascal,
Perl, PHP, PostgreSQL, Python, R, Rexx, Ring, RPG, Ruby, Rust, Scheme,
Scilab, S-Lang, Smalltalk, SP-Forth, SPL, Tcl, Visual Basic, Visual FoxPro,
Q, wxwidgets, XBLite and Xoho. By the time you read this, additional ones
may have appeared!
3.10 What about SOAP, WebDAV, XML-RPC or similar protocols over HTTP?
@ -598,11 +597,11 @@ FAQ
curl -d "datatopost" -H "Content-Type: text/xml" [URL]
3.12 Why do FTP specific features over HTTP proxy fail?
3.12 Why do FTP-specific features over HTTP proxy fail?
Because when you use an HTTP proxy, the protocol spoken on the network will
be HTTP, even if you specify an FTP URL. This effectively means that you
normally can't use FTP specific features such as FTP upload and FTP quote
normally can't use FTP-specific features such as FTP upload and FTP quote
etc.
There is one exception to this rule, and that is if you can "tunnel through"
@ -610,7 +609,7 @@ FAQ
and is generally not available as proxy admins usually disable tunneling to
ports other than 443 (which is used for HTTPS access through proxies).
3.13 Why does my single/double quotes fail?
3.13 Why do my single/double quotes fail?
To specify a command line option that includes spaces, you might need to
put the entire option within quotes. Like in:
@ -746,7 +745,7 @@ FAQ
directory, you get the actual root directory.
To specify a file in your user's home directory, you need to use the correct
URL syntax which for sftp might look similar to:
URL syntax which for SFTP might look similar to:
curl -O -u user:password sftp://example.com/~/file.txt
@ -895,7 +894,7 @@ FAQ
<H1>Moved Permanently</H1> The document has moved <A
HREF="http://same_url_now_with_a_trailing_slash/">here</A>.
it might be because you request a directory URL but without the trailing
it might be because you requested a directory URL but without the trailing
slash. Try the same operation again _with_ the trailing URL, or use the
-L/--location option to follow the redirection.
@ -926,8 +925,8 @@ FAQ
anyone would call security.
Also note that regular HTTP (using Basic authentication) and FTP passwords
are sent in clear across the network. All it takes for anyone to fetch them
is to listen on the network. Eavesdropping is very easy. Use more secure
are sent as cleartext across the network. All it takes for anyone to fetch
them is to listen on the network. Eavesdropping is very easy. Use more secure
authentication methods (like Digest, Negotiate or even NTLM) or consider the
SSL-based alternatives HTTPS and FTPS.
@ -962,7 +961,7 @@ FAQ
software you're trying to interact with. This is not anything curl can do
anything about.
4.11 Why does my HTTP range requests return the full document?
4.11 Why do my HTTP range requests return the full document?
Because the range may not be supported by the server, or the server may
choose to ignore it and return the full document anyway.
@ -1012,8 +1011,8 @@ FAQ
redirects the browser to another given URL.
There is no way to make curl follow these redirects. You must either
manually figure out what the page is set to do, or you write a script that
parses the results and fetches the new URL.
manually figure out what the page is set to do, or write a script that parses
the results and fetches the new URL.
4.15 FTPS doesn't work
@ -1025,7 +1024,7 @@ FAQ
speak SSL. FTPS:// connections default to port 990.
To use explicit FTPS, you use an FTP:// URL and the --ftp-ssl option (or one
of its related flavours). This is the most common method, and the one
of its related flavors). This is the most common method, and the one
mandated by RFC4217. This kind of connection will then of course use the
standard FTP port 21 by default.
@ -1116,7 +1115,7 @@ FAQ
an embedded device with only a single network connection) may want to act
immediately if its lone network connection goes down. That can be achieved
by having the application monitor the network connection on its own using an
OS-specific mechanism, then signalling libcurl to abort (see also item 5.13).
OS-specific mechanism, then signaling libcurl to abort (see also item 5.13).
4.20 curl doesn't return error for HTTP non-200 responses!

View file

@ -97,7 +97,7 @@ Anyone can aspire to become a curl maintainer.
### Duties
There are no mandatory duties. We hope and wish that maintainers consider
reviewing patches and help merching them, especially when the changes are
reviewing patches and help merging them, especially when the changes are
within the area of personal expertise and experience.
### Requirements
@ -108,7 +108,7 @@ within the area of personal expertise and experience.
### Recommendations
- please enable 2fa on your github account to reduce risk of malicious sourc
- please enable 2fa on your github account to reduce risk of malicious source
code tampering
- consider enabling signed git commits for additional verification of changes

View file

@ -18,7 +18,16 @@
original [Netscape spec from 1994](https://curl.haxx.se/rfc/cookie_spec.html).
In 2011, [RFC6265](https://www.ietf.org/rfc/rfc6265.txt) was finally
published and details how cookies work within HTTP.
published and details how cookies work within HTTP. In 2016, an update which
added support for prefixes was
[proposed](https://tools.ietf.org/html/draft-ietf-httpbis-cookie-prefixes-00),
and in 2017, another update was
[drafted](https://tools.ietf.org/html/draft-ietf-httpbis-cookie-alone-01)
to deprecate modification of 'secure' cookies from non-secure origins. Both
of these drafts have been incorporated into a proposal to
[replace](https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-02)
RFC6265. Cookie prefixes and secure cookie modification protection have been
implemented by curl.
## Cookies saved to disk

115
curl/docs/HTTP3.md Normal file
View file

@ -0,0 +1,115 @@
# HTTP3 (and QUIC)
## Resources
[HTTP/3 Explained](https://daniel.haxx.se/http3-explained/) - the online free
book describing the protocols involved.
[QUIC implementation](https://github.com/curl/curl/wiki/QUIC-implementation) -
the wiki page describing the plan for how to support QUIC and HTTP/3 in curl
and libcurl.
[quicwg.org](https://quicwg.org/) - home of the official protocol drafts
## QUIC libraries
QUIC libraries we're experimenting with:
[ngtcp2](https://github.com/ngtcp2/ngtcp2)
[quiche](https://github.com/cloudflare/quiche)
## Experimental!
HTTP/3 and QUIC support in curl is considered **EXPERIMENTAL** until further
notice. It needs to be enabled at build-time.
Further development and tweaking of the HTTP/3 support in curl will happen in
the master branch using pull-requests, just like ordinary changes.
# ngtcp2 version
## Build
Build (patched) OpenSSL
% git clone --depth 1 -b openssl-quic-draft-23 https://github.com/tatsuhiro-t/openssl
% cd openssl
% ./config enable-tls1_3 --prefix=<somewhere1>
% make
% make install_sw
Build nghttp3
% cd ..
% git clone https://github.com/ngtcp2/nghttp3
% cd nghttp3
% autoreconf -i
% ./configure --prefix=<somewhere2> --enable-lib-only
% make
% make install
Build ngtcp2
% cd ..
% git clone https://github.com/ngtcp2/ngtcp2
% cd ngtcp2
% autoreconf -i
% ./configure PKG_CONFIG_PATH=<somewhere1>/lib/pkgconfig:<somewhere2>/lib/pkgconfig LDFLAGS="-Wl,-rpath,<somewhere1>/lib" --prefix=<somewhere3>
% make
% make install
Build curl
% cd ..
% git clone https://github.com/curl/curl
% cd curl
% ./buildconf
% LDFLAGS="-Wl,-rpath,<somewhere1>/lib" ./configure --with-ssl=<somewhere1> --with-nghttp3=<somewhere2> --with-ngtcp2=<somewhere3>
% make
# quiche version
## build
Clone quiche and BoringSSL:
% git clone --recursive https://github.com/cloudflare/quiche
Build BoringSSL (it needs to be built manually so it can be reused with curl):
% cd quiche/deps/boringssl
% mkdir build
% cd build
% cmake -DCMAKE_POSITION_INDEPENDENT_CODE=on ..
% make
% cd ..
% mkdir -p .openssl/lib
% cp build/crypto/libcrypto.a build/ssl/libssl.a .openssl/lib
% ln -s $PWD/include .openssl
Build quiche:
% cd ../..
% QUICHE_BSSL_PATH=$PWD/deps/boringssl cargo build --release --features pkg-config-meta
Clone and build curl:
% cd ..
% git clone https://github.com/curl/curl
% cd curl
% ./buildconf
% ./configure LDFLAGS="-Wl,-rpath,$PWD/../quiche/target/release" --with-ssl=$PWD/../quiche/deps/boringssl/.openssl --with-quiche=$PWD/../quiche/target/release
% make
## Run
Use HTTP/3 directly:
curl --http3 https://nghttp2.org:8443/
Upgrade via Alt-Svc:
curl --alt-svc altsvc.cache https://quic.aiortc.org/
See this [list of public HTTP/3 servers](https://bagder.github.io/HTTP3-test/)
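
The same requests from libcurl, as a hedged sketch (assuming a build with one
of the experimental HTTP/3 backends above, and reusing the test server):

    #include <stdio.h>
    #include <curl/curl.h>

    int main(void)
    {
      CURL *curl = curl_easy_init();
      if(curl) {
        CURLcode res;
        curl_easy_setopt(curl, CURLOPT_URL, "https://nghttp2.org:8443/");
        /* ask for HTTP/3 directly; needs an HTTP/3-enabled build */
        curl_easy_setopt(curl, CURLOPT_HTTP_VERSION,
                         (long)CURL_HTTP_VERSION_3);
        res = curl_easy_perform(curl);
        if(res != CURLE_OK)
          fprintf(stderr, "transfer failed: %s\n", curl_easy_strerror(res));
        curl_easy_cleanup(curl);
      }
      return 0;
    }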

View file

@ -7,6 +7,18 @@ document does not describe how to install curl or libcurl using such a binary
package. This document describes how to compile, build and install curl and
libcurl from source code.
## Building using vcpkg
You can download and install curl and libcurl using the [vcpkg](https://github.com/Microsoft/vcpkg/) dependency manager:
git clone https://github.com/Microsoft/vcpkg.git
cd vcpkg
./bootstrap-vcpkg.sh
./vcpkg integrate install
vcpkg install curl[tool]
The curl port in vcpkg is kept up to date by Microsoft team members and community contributors. If the version is out of date, please [create an issue or pull request](https://github.com/Microsoft/vcpkg) on the vcpkg repository.
## Building from git
If you get your code off a git repository instead of a release tarball, see
@ -56,15 +68,15 @@ you have pkg-config installed, set the pkg-config path first, like this:
Without pkg-config installed, use this:
./configure --with-ssl=/opt/OpenSSL
./configure --with-ssl=/opt/OpenSSL
If you insist on forcing a build without SSL support, even though you may
have OpenSSL installed in your system, you can run configure like this:
./configure --without-ssl
./configure --without-ssl
If you have OpenSSL installed, but with the libraries in one place and the
header files somewhere else, you have to set the LDFLAGS and CPPFLAGS
header files somewhere else, you have to set the `LDFLAGS` and `CPPFLAGS`
environment variables prior to running configure. Something like this should
work:
@ -101,12 +113,12 @@ The default OpenSSL configure check will also detect and use BoringSSL or
libressl.
- GnuTLS: `--without-ssl --with-gnutls`.
- Cyassl: `--without-ssl --with-cyassl`
- wolfSSL: `--without-ssl --with-wolfssl`
- NSS: `--without-ssl --with-nss`
- PolarSSL: `--without-ssl --with-polarssl`
- mbedTLS: `--without-ssl --with-mbedtls`
- schannel: `--without-ssl --with-winssl`
- secure transport: `--without-ssl --with-darwinssl`
- schannel: `--without-ssl --with-schannel`
- secure transport: `--without-ssl --with-secure-transport`
- MesaLink: `--without-ssl --with-mesalink`
# Windows
@ -121,9 +133,9 @@ libressl.
KB140584 is a must for any Windows developer. Especially important is full
understanding if you are not going to follow the advice given above.
- [How To Use the C Run-Time](https://support.microsoft.com/kb/94248/en-us)
- [How to link with the correct C Run-Time CRT library](https://support.microsoft.com/kb/140584/en-us)
- [Potential Errors Passing CRT Objects Across DLL Boundaries](https://msdn.microsoft.com/en-us/library/ms235460)
- [How To Use the C Run-Time](https://support.microsoft.com/help/94248/how-to-use-the-c-run-time)
- [Run-Time Library Compiler Options](https://docs.microsoft.com/cpp/build/reference/md-mt-ld-use-run-time-library)
- [Potential Errors Passing CRT Objects Across DLL Boundaries](https://docs.microsoft.com/cpp/c-runtime-library/potential-errors-passing-crt-objects-across-dll-boundaries)
If your app is misbehaving in some strange way, or it is suffering from
memory corruption, before asking for further help, please try first to
@ -148,7 +160,7 @@ make targets available to build libcurl with more features, use:
and SSPI support.
If you have any problems linking libraries or finding header files, be sure
to verify that the provided "Makefile.m32" files use the proper paths, and
to verify that the provided `Makefile.m32` files use the proper paths, and
adjust as necessary. It is also possible to override these paths with
environment variables, for example:
@ -172,8 +184,8 @@ If you want to enable LDAPS support then set LDAPS=1.
## Cygwin
Almost identical to the unix installation. Run the configure script in the
curl source tree root with `sh configure`. Make sure you have the sh
executable in /bin/ or you'll see the configure fail toward the end.
curl source tree root with `sh configure`. Make sure you have the `sh`
executable in `/bin/` or you'll see the configure fail toward the end.
Run `make`
@ -200,9 +212,9 @@ protocols:
If you want to set any of these defines you have the following options:
- Modify lib/config-win32.h
- Modify lib/curl_setup.h
- Modify winbuild/Makefile.vc
- Modify `lib/config-win32.h`
- Modify `lib/curl_setup.h`
- Modify `winbuild/Makefile.vc`
- Modify the "Preprocessor Definitions" in the libcurl project
Note: The pre-processor settings can be found using the Visual Studio IDE
@ -213,12 +225,12 @@ versions.
## Using BSD-style lwIP instead of Winsock TCP/IP stack in Win32 builds
In order to compile libcurl and curl using BSD-style lwIP TCP/IP stack it is
necessary to make definition of preprocessor symbol USE_LWIPSOCK visible to
necessary to make definition of preprocessor symbol `USE_LWIPSOCK` visible to
libcurl and curl compilation processes. To set this definition you have the
following alternatives:
- Modify lib/config-win32.h and src/config-win32.h
- Modify winbuild/Makefile.vc
- Modify `lib/config-win32.h` and `src/config-win32.h`
- Modify `winbuild/Makefile.vc`
- Modify the "Preprocessor Definitions" in the libcurl project
Note: The pre-processor settings can be found using the Visual Studio IDE
@ -248,13 +260,13 @@ look for dynamic import symbols.
## Legacy Windows and SSL
WinSSL (specifically Schannel from Windows SSPI), is the native SSL library in
Windows. However, WinSSL in Windows <= XP is unable to connect to servers that
Schannel (from Windows SSPI) is the native SSL library in Windows. However,
Schannel in Windows <= XP is unable to connect to servers that
no longer support the legacy handshakes and algorithms used by those
versions. If you will be using curl in one of those earlier versions of
Windows you should choose another SSL backend such as OpenSSL.
# Apple iOS and Mac OS X
# Apple iOS and macOS
On modern Apple operating systems, curl can be built to use Apple's SSL/TLS
implementation, Secure Transport, instead of OpenSSL. To build with Secure
@ -269,12 +281,12 @@ the server. This, of course, includes the root certificates that ship with the
OS. The `--cert` and `--engine` options, and their libcurl equivalents, are
currently unimplemented in curl with Secure Transport.
For OS X users: In OS X 10.8 ("Mountain Lion"), Apple made a major overhaul to
the Secure Transport API that, among other things, added support for the newer
TLS 1.1 and 1.2 protocols. To get curl to support TLS 1.1 and 1.2, you must
build curl on Mountain Lion or later, or by using the equivalent SDK. If you
set the `MACOSX_DEPLOYMENT_TARGET` environmental variable to an earlier
version of OS X prior to building curl, then curl will use the new Secure
For macOS users: In OS X 10.8 ("Mountain Lion"), Apple made a major overhaul
to the Secure Transport API that, among other things, added support for the
newer TLS 1.1 and 1.2 protocols. To get curl to support TLS 1.1 and 1.2, you
must build curl on Mountain Lion or later, or by using the equivalent SDK. If
you set the `MACOSX_DEPLOYMENT_TARGET` environmental variable to an earlier
version of macOS prior to building curl, then curl will use the new Secure
Transport API on Mountain Lion and later, and fall back on the older API when
the same curl binary is executed on older cats. For example, running these
commands in curl's directory in the shell will build the code such that it
@ -288,7 +300,7 @@ will run on cats as old as OS X 10.6 ("Snow Leopard") (using bash):
Download and unpack the curl package.
'cd' to the new directory. (e.g. `cd curl-7.12.3`)
`cd` to the new directory. (e.g. `cd curl-7.12.3`)
Set environment variables to point to the cross-compile toolchain and call
configure with any options you need. Be sure and specify the `--host` and
@ -327,7 +339,7 @@ In some cases, you may be able to simplify the above commands to as little as:
There are a number of configure options that can be used to reduce the size of
libcurl for embedded applications where binary size is an important factor.
First, be sure to set the CFLAGS variable when configuring with any relevant
First, be sure to set the `CFLAGS` variable when configuring with any relevant
compiler optimization flags to reduce the size of the binary. For gcc, this
would mean at minimum the -Os option, and potentially the `-march=X`,
`-mdynamic-no-pic` and `-flto` options as well, e.g.
@ -360,8 +372,8 @@ use, here are some other flags that can reduce the size of the library:
The GNU compiler and linker have a number of options that can reduce the
size of the libcurl dynamic libraries on some platforms even further.
Specify them by providing appropriate CFLAGS and LDFLAGS variables on the
configure command-line, e.g.
Specify them by providing appropriate `CFLAGS` and `LDFLAGS` variables on
the configure command-line, e.g.
CFLAGS="-Os -ffunction-sections -fdata-sections
-fno-unwind-tables -fno-asynchronous-unwind-tables -flto"
@ -383,7 +395,7 @@ in a lower total size than dynamically linking.
Note that the curl test harness can detect the use of some, but not all, of
the `--disable` statements suggested above. Use will cause tests relying on
those features to fail. The test harness can be manually forced to skip the
relevant tests by specifying certain key words on the runtests.pl command
relevant tests by specifying certain key words on the `runtests.pl` command
line. Following is a list of appropriate key words:
- `--disable-cookies` !cookies

View file

@ -7,13 +7,13 @@ curl internals
- [Windows vs Unix](#winvsunix)
- [Library](#Library)
- [`Curl_connect`](#Curl_connect)
- [`Curl_do`](#Curl_do)
- [`multi_do`](#multi_do)
- [`Curl_readwrite`](#Curl_readwrite)
- [`Curl_done`](#Curl_done)
- [`multi_done`](#multi_done)
- [`Curl_disconnect`](#Curl_disconnect)
- [HTTP(S)](#http)
- [FTP](#ftp)
- [Kerberos](#kerberos)
- [Kerberos](#kerberos)
- [TELNET](#telnet)
- [FILE](#file)
- [SMB](#smb)
@ -34,10 +34,17 @@ curl internals
- [`curl_off_t`](#curl_off_t)
- [curlx](#curlx)
- [Content Encoding](#contentencoding)
- [hostip.c explained](#hostip)
- [`hostip.c` explained](#hostip)
- [Track Down Memory Leaks](#memoryleak)
- [`multi_socket`](#multi_socket)
- [Structs in libcurl](#structs)
- [Curl_easy](#Curl_easy)
- [connectdata](#connectdata)
- [Curl_multi](#Curl_multi)
- [Curl_handler](#Curl_handler)
- [conncache](#conncache)
- [Curl_share](#Curl_share)
- [CookieInfo](#CookieInfo)
<a name="intro"></a>
Intro
@ -66,7 +73,7 @@ git
Portability
===========
We write curl and libcurl to compile with C89 compilers. On 32bit and up
We write curl and libcurl to compile with C89 compilers. On 32-bit and up
machines. Most of libcurl assumes more or less POSIX compliance but that's
not a requirement.
@ -83,7 +90,7 @@ Dependencies
- libssh2 0.16
- c-ares 1.6.0
- libidn2 2.0.0
- cyassl 2.0.0
- wolfSSL 2.0.0
- openldap 2.0
- MIT Kerberos 1.2.4
- GSKit V5R3M0
@ -118,7 +125,7 @@ Build tools
- GNU M4 1.4
- perl 5.004
- roffit 0.5
- groff ? (any version that supports "groff -Tps -man [in] [out]")
- groff ? (any version that supports `groff -Tps -man [in] [out]`)
- ps2pdf (gs) ?
<a name="winvsunix"></a>
@ -132,7 +139,7 @@ Windows vs Unix
In curl, this is solved with defines and macros, so that the source looks
the same in all places except for the header file that defines them. The
macros in use are sclose(), sread() and swrite().
macros in use are `sclose()`, `sread()` and `swrite()`.
2. Windows requires a couple of init calls for the socket stuff.
@ -171,14 +178,14 @@ Library
There are plenty of entry points to the library, namely each publicly defined
function that libcurl offers to applications. All of those functions are
rather small and easy-to-follow. All the ones prefixed with `curl_easy` are
put in the lib/easy.c file.
put in the `lib/easy.c` file.
`curl_global_init()` and `curl_global_cleanup()` should be called by the
application to initialize and clean up global stuff in the library. As of
today, it can handle the global SSL initing if SSL is enabled and it can init
the socket layer on windows machines. libcurl itself has no "global" scope.
All printf()-style functions use the supplied clones in lib/mprintf.c. This
All printf()-style functions use the supplied clones in `lib/mprintf.c`. This
makes sure we stay absolutely platform independent.
[ `curl_easy_init()`][2] allocates an internal struct and makes some
@ -197,8 +204,8 @@ Library
`curl_multi_wait()`, and `curl_multi_perform()` until the transfer is done
and then returns.
Some of the most important key functions in url.c are called from multi.c
when certain key steps are to be made in the transfer operation.
Some of the most important key functions in `url.c` are called from
`multi.c` when certain key steps are to be made in the transfer operation.
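
A hedged sketch of that calling sequence from an application's point of view
(URL chosen arbitrarily):

    #include <stdio.h>
    #include <curl/curl.h>

    int main(void)
    {
      CURL *curl;
      CURLcode res;

      curl_global_init(CURL_GLOBAL_DEFAULT);  /* global init, as described above */
      curl = curl_easy_init();                /* allocate the easy handle */
      if(curl) {
        curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/");
        res = curl_easy_perform(curl);        /* drives the multi machinery internally */
        if(res != CURLE_OK)
          fprintf(stderr, "curl_easy_perform() failed: %s\n",
                  curl_easy_strerror(res));
        curl_easy_cleanup(curl);
      }
      curl_global_cleanup();                  /* global cleanup */
      return 0;
    }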
<a name="Curl_connect"></a>
Curl_connect()
@ -206,32 +213,32 @@ Curl_connect()
Analyzes the URL, it separates the different components and connects to the
remote host. This may involve using a proxy and/or using SSL. The
`Curl_resolv()` function in lib/hostip.c is used for looking up host names
(it does then use the proper underlying method, which may vary between
platforms and builds).
`Curl_resolv()` function in `lib/hostip.c` is used for looking up host
names (it does then use the proper underlying method, which may vary
between platforms and builds).
When `Curl_connect` is done, we are connected to the remote site. Then it
is time to tell the server to get a document/file. `Curl_do()` arranges
this.
This function makes sure there's an allocated and initiated 'connectdata'
This function makes sure there's an allocated and initiated `connectdata`
struct that is used for this particular connection only (although there may
be several requests performed on the same connect). A bunch of things are
inited/inherited from the `Curl_easy` struct.
<a name="Curl_do"></a>
Curl_do()
<a name="multi_do"></a>
multi_do()
---------
`Curl_do()` makes sure the proper protocol-specific function is called. The
functions are named after the protocols they handle.
`multi_do()` makes sure the proper protocol-specific function is called.
The functions are named after the protocols they handle.
The protocol-specific functions of course deal with protocol-specific
negotiations and setup. They have access to the `Curl_sendf()` (from
lib/sendf.c) function to send printf-style formatted data to the remote
`lib/sendf.c`) function to send printf-style formatted data to the remote
host and when they're ready to make the actual file transfer they call the
`Curl_Transfer()` function (in lib/transfer.c) to setup the transfer and
returns.
`Curl_setup_transfer()` function (in `lib/transfer.c`) to setup the
transfer and returns.
If this DO function fails and the connection is being re-used, libcurl will
then close this connection, setup a new connection and re-issue the DO
@ -239,28 +246,24 @@ Curl_do()
we have discovered a dead connection before the DO function and thus we
might wrongly be re-using a connection that was closed by the remote peer.
Some time during the DO function, the `Curl_setup_transfer()` function must
be called with some basic info about the upcoming transfer: what socket(s)
to read/write and the expected file transfer sizes (if known).
<a name="Curl_readwrite"></a>
Curl_readwrite()
----------------
Called during the transfer of the actual protocol payload.
During transfer, the progress functions in lib/progress.c are called at
During transfer, the progress functions in `lib/progress.c` are called at
frequent intervals (or at the user's choice, a specified callback might get
called). The speedcheck functions in lib/speedcheck.c are also used to
called). The speedcheck functions in `lib/speedcheck.c` are also used to
verify that the transfer is as fast as required.
<a name="Curl_done"></a>
Curl_done()
<a name="multi_done"></a>
multi_done()
-----------
Called after a transfer is done. This function takes care of everything
that has to be done after a transfer. This function attempts to leave
matters in a state so that `Curl_do()` should be possible to call again on
matters in a state so that `multi_do()` should be possible to call again on
the same connection (in a persistent connection case). It might also soon
be closed with `Curl_disconnect()`.
@ -283,11 +286,12 @@ HTTP(S)
=======
HTTP offers a lot and is the protocol in curl that uses the most lines of
code. There is a special file (lib/formdata.c) that offers all the multipart
post functions.
code. There is a special file `lib/formdata.c` that offers all the
multipart post functions.
base64-functions for user+password stuff (and more) is in (lib/base64.c) and
all functions for parsing and sending cookies are found in (lib/cookie.c).
The base64 functions for user+password stuff (and more) are in `lib/base64.c`
and all functions for parsing and sending cookies are found in
`lib/cookie.c`.
HTTPS uses in almost every case the same procedure as HTTP, with only two
exceptions: the connect procedure is different and the function used to read
@ -309,18 +313,18 @@ FTP
===
The `Curl_if2ip()` function can be used for getting the IP number of a
specified network interface, and it resides in lib/if2ip.c.
specified network interface, and it resides in `lib/if2ip.c`.
`Curl_ftpsendf()` is used for sending FTP commands to the remote server. It
was made a separate function to prevent us programmers from forgetting that
they must be CRLF terminated. They must also be sent in one single write() to
make firewalls and similar happy.
they must be CRLF terminated. They must also be sent in one single `write()`
to make firewalls and similar happy.
<a name="kerberos"></a>
Kerberos
--------
========
Kerberos support is mainly in lib/krb5.c and lib/security.c but also
Kerberos support is mainly in `lib/krb5.c` and `lib/security.c` but also
`curl_sasl_sspi.c` and `curl_sasl_gssapi.c` for the email protocols and
`socks_gssapi.c` and `socks_sspi.c` for SOCKS5 proxy specifics.
@ -328,55 +332,57 @@ Kerberos
TELNET
======
Telnet is implemented in lib/telnet.c.
Telnet is implemented in `lib/telnet.c`.
<a name="file"></a>
FILE
====
The file:// protocol is dealt with in lib/file.c.
The `file://` protocol is dealt with in `lib/file.c`.
<a name="smb"></a>
SMB
===
The smb:// protocol is dealt with in lib/smb.c.
The `smb://` protocol is dealt with in `lib/smb.c`.
<a name="ldap"></a>
LDAP
====
Everything LDAP is in lib/ldap.c and lib/openldap.c
Everything LDAP is in `lib/ldap.c` and `lib/openldap.c`.
<a name="email"></a>
E-mail
======
The e-mail related source code is in lib/imap.c, lib/pop3.c and lib/smtp.c.
The e-mail related source code is in `lib/imap.c`, `lib/pop3.c` and
`lib/smtp.c`.
<a name="general"></a>
General
=======
URL encoding and decoding, called escaping and unescaping in the source code,
is found in lib/escape.c.
is found in `lib/escape.c`.
While transferring data in Transfer() a few functions might get used.
`curl_getdate()` in lib/parsedate.c is for HTTP date comparisons (and more).
While transferring data in `Transfer()` a few functions might get used.
`curl_getdate()` in `lib/parsedate.c` is for HTTP date comparisons (and
more).
lib/getenv.c offers `curl_getenv()` which is for reading environment
`lib/getenv.c` offers `curl_getenv()` which is for reading environment
variables in a neat platform independent way. That's used in the client, but
also in lib/url.c when checking the proxy environment variables. Note that
contrary to the normal unix getenv(), this returns an allocated buffer that
must be free()ed after use.
also in `lib/url.c` when checking the proxy environment variables. Note that
contrary to the normal unix `getenv()`, this returns an allocated buffer that
must be `free()`ed after use.
lib/netrc.c holds the .netrc parser
`lib/netrc.c` holds the `.netrc` parser.
lib/timeval.c features replacement functions for systems that don't have
gettimeofday() and a few support functions for timeval conversions.
`lib/timeval.c` features replacement functions for systems that don't have
`gettimeofday()` and a few support functions for timeval conversions.
A function named `curl_version()` that returns the full curl version string
is found in lib/version.c.
is found in `lib/version.c`.
<a name="persistent"></a>
Persistent Connections
@ -390,7 +396,7 @@ Persistent Connections
as well as all the options etc that the library-user may choose.
- The `Curl_easy` struct holds the "connection cache" (an array of
pointers to 'connectdata' structs).
pointers to `connectdata` structs).
- This enables the 'curl handle' to be reused on subsequent transfers.
@ -438,10 +444,10 @@ SSL libraries
in future libcurl versions.
To deal with this internally in the best way possible, we have a generic SSL
function API as provided by the vtls/vtls.[ch] system, and they are the only
function API as provided by the `vtls/vtls.[ch]` system, and they are the only
SSL functions we must use from within libcurl. vtls is then crafted to use
the appropriate lower-level function calls to whatever SSL library that is in
use. For example vtls/openssl.[ch] for the OpenSSL library.
use. For example `vtls/openssl.[ch]` for the OpenSSL library.
<a name="symbols"></a>
Library Symbols
@ -460,7 +466,7 @@ Return Codes and Informationals
I've made things simple. Almost every function in libcurl returns a CURLcode,
which must be `CURLE_OK` if everything is OK or otherwise a suitable error
code as the curl/curl.h include file defines. The very spot that detects an
code as the `curl/curl.h` include file defines. The very spot that detects an
error must use the `Curl_failf()` function to set the human-readable error
description.
@ -482,20 +488,20 @@ API/ABI
Client
======
main() resides in `src/tool_main.c`.
`main()` resides in `src/tool_main.c`.
`src/tool_hugehelp.c` is automatically generated by the mkhelp.pl perl script
to display the complete "manual" and the `src/tool_urlglob.c` file holds the
functions used for the URL-"globbing" support. Globbing in the sense that the
{} and [] expansion stuff is there.
`src/tool_hugehelp.c` is automatically generated by the `mkhelp.pl` perl
script to display the complete "manual" and the `src/tool_urlglob.c` file
holds the functions used for the URL-"globbing" support. Globbing in the
sense that the `{}` and `[]` expansion stuff is there.
The client mostly sets up its 'config' struct properly, then
The client mostly sets up its `config` struct properly, then
it calls the `curl_easy_*()` functions of the library and when it gets back
control after the `curl_easy_perform()` it cleans up the library, checks
status and exits.
When the operation is done, the ourWriteOut() function in src/writeout.c may
be called to report about the operation. That function is using the
When the operation is done, the `ourWriteOut()` function in `src/writeout.c`
may be called to report about the operation. That function is using the
`curl_easy_getinfo()` function to extract useful information from the curl
session.
@ -506,30 +512,32 @@ Client
Memory Debugging
================
The file lib/memdebug.c contains debug-versions of a few functions. Functions
such as malloc, free, fopen, fclose, etc that somehow deal with resources
that might give us problems if we "leak" them. The functions in the memdebug
system do nothing fancy, they do their normal function and then log
information about what they just did. The logged data can then be analyzed
after a complete session,
The file `lib/memdebug.c` contains debug-versions of a few functions.
Functions such as `malloc()`, `free()`, `fopen()`, `fclose()`, etc that
somehow deal with resources that might give us problems if we "leak" them.
The functions in the memdebug system do nothing fancy, they do their normal
function and then log information about what they just did. The logged data
can then be analyzed after a complete session,
memanalyze.pl is the perl script present in tests/ that analyzes a log file
generated by the memory tracking system. It detects if resources are
`memanalyze.pl` is the perl script present in `tests/` that analyzes a log
file generated by the memory tracking system. It detects if resources are
allocated but never freed and other kinds of errors related to resource
management.
Internally, definition of preprocessor symbol DEBUGBUILD restricts code which
is only compiled for debug enabled builds. And symbol CURLDEBUG is used to
differentiate code which is _only_ used for memory tracking/debugging.
Internally, definition of preprocessor symbol `DEBUGBUILD` restricts code
which is only compiled for debug enabled builds. And symbol `CURLDEBUG` is
used to differentiate code which is _only_ used for memory
tracking/debugging.
Use -DCURLDEBUG when compiling to enable memory debugging, this is also
switched on by running configure with --enable-curldebug. Use -DDEBUGBUILD
when compiling to enable a debug build or run configure with --enable-debug.
Use `-DCURLDEBUG` when compiling to enable memory debugging, this is also
switched on by running configure with `--enable-curldebug`. Use
`-DDEBUGBUILD` when compiling to enable a debug build or run configure with
`--enable-debug`.
curl --version will list 'Debug' feature for debug enabled builds, and
`curl --version` will list 'Debug' feature for debug enabled builds, and
will list 'TrackMemory' feature for curl debug memory tracking capable
builds. These features are independent and can be controlled when running
the configure script. When --enable-debug is given both features will be
the configure script. When `--enable-debug` is given both features will be
enabled, unless some restriction prevents memory tracking from being used.
<a name="test"></a>
@ -540,12 +548,12 @@ Test Suite
curl archive tree, and it contains a bunch of scripts and a lot of test case
data.
The main test script is runtests.pl that will invoke test servers like
httpserver.pl and ftpserver.pl before all the test cases are performed. The
test suite currently only runs on Unix-like platforms.
The main test script is `runtests.pl` that will invoke test servers like
`httpserver.pl` and `ftpserver.pl` before all the test cases are performed.
The test suite currently only runs on Unix-like platforms.
You'll find a description of the test suite in the tests/README file, and the
test case data files in the tests/FILEFORMAT file.
You'll find a description of the test suite in the `tests/README` file, and
the test case data files in the `tests/FILEFORMAT` file.
The test suite automatically detects if curl was built with the memory
debugging enabled, and if it was, it will detect memory leaks, too.
@ -573,7 +581,7 @@ Asynchronous name resolves
prevent linking errors later on). Then I simply build the areslib project
(the other projects adig/ahost seem to fail under MSVC).
Next was libcurl. I opened lib/config-win32.h and I added a:
Next was libcurl. I opened `lib/config-win32.h` and I added a:
`#define USE_ARES 1`
Next thing I did was I added the path for the ares includes to the include
@ -582,8 +590,8 @@ Asynchronous name resolves
Lastly, I also changed libcurl to be single-threaded rather than
multi-threaded, again this was to prevent some duplicate symbol errors. I'm
not sure why I needed to change everything to single-threaded, but when I
didn't I got redefinition errors for several CRT functions (malloc, stricmp,
etc.)
didn't, I got redefinition errors for several CRT functions (`malloc()`,
`stricmp()`, etc.)
<a name="curl_off_t"></a>
`curl_off_t`
@ -591,9 +599,10 @@ Asynchronous name resolves
`curl_off_t` is a data type provided by the external libcurl include
headers. It is the type meant to be used for the [`curl_easy_setopt()`][1]
options that end with LARGE. The type is 64bit large on most modern
options that end with LARGE. The type is 64 bits wide on most modern
platforms.
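For illustration, a minimal sketch (the URL and the 3 GB offset are just
examples) of passing a `curl_off_t` to one of the LARGE options and printing
it with the matching public format macro:

    #include <stdio.h>
    #include <curl/curl.h>

    int main(void)
    {
      CURL *curl = curl_easy_init();
      if(curl) {
        /* curl_off_t is the type to use for the *_LARGE options */
        curl_off_t resume_from = (curl_off_t)3 * 1024 * 1024 * 1024; /* 3GB */

        curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/big.iso");
        curl_easy_setopt(curl, CURLOPT_RESUME_FROM_LARGE, resume_from);

        /* CURL_FORMAT_CURL_OFF_T is the printf format for curl_off_t */
        printf("resuming at %" CURL_FORMAT_CURL_OFF_T " bytes\n", resume_from);

        curl_easy_perform(curl);
        curl_easy_cleanup(curl);
      }
      return 0;
    }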
<a name="curlx"></a>
curlx
=====
@ -603,15 +612,15 @@ curlx
additional functions.
We provide them through a single header file for easy access for apps:
"curlx.h"
`curlx.h`
`curlx_strtoofft()`
-------------------
A macro that converts a string containing a number to a `curl_off_t` number.
This might use the `curlx_strtoll()` function which is provided as source
code in strtoofft.c. Note that the function is only provided if no
strtoll() (or equivalent) function exist on your platform. If `curl_off_t`
is only a 32 bit number on your platform, this macro uses strtol().
`strtoll()` (or equivalent) function exists on your platform. If `curl_off_t`
is only a 32-bit number on your platform, this macro uses `strtol()`.
Future
------
@ -645,27 +654,28 @@ Content Encoding
[HTTP/1.1][4] specifies that a client may request that a server encode its
response. This is usually used to compress a response using one (or more)
encodings from a set of commonly available compression techniques. These
schemes include 'deflate' (the zlib algorithm), 'gzip' 'br' (brotli) and
'compress'. A client requests that the server perform an encoding by including
an Accept-Encoding header in the request document. The value of the header
should be one of the recognized tokens 'deflate', ... (there's a way to
schemes include `deflate` (the zlib algorithm), `gzip`, `br` (brotli) and
`compress`. A client requests that the server perform an encoding by including
an `Accept-Encoding` header in the request document. The value of the header
should be one of the recognized tokens `deflate`, ... (there's a way to
register new schemes/tokens, see sec 3.5 of the spec). A server MAY honor
the client's encoding request. When a response is encoded, the server
includes a Content-Encoding header in the response. The value of the
Content-Encoding header indicates which encodings were used to encode the
includes a `Content-Encoding` header in the response. The value of the
`Content-Encoding` header indicates which encodings were used to encode the
data, in the order in which they were applied.
It's also possible for a client to attach priorities to different schemes so
that the server knows which it prefers. See sec 14.3 of RFC 2616 for more
information on the Accept-Encoding header. See sec [3.1.2.2 of RFC 7231][15]
for more information on the Content-Encoding header.
information on the `Accept-Encoding` header. See sec
[3.1.2.2 of RFC 7231][15] for more information on the `Content-Encoding`
header.
## Supported content encodings
The 'deflate', 'gzip' and 'br' content encodings are supported by libcurl.
The `deflate`, `gzip` and `br` content encodings are supported by libcurl.
Both regular and chunked transfers work fine. The zlib library is required
for the 'deflate' and 'gzip' encodings, while the brotli decoding library is
for the 'br' encoding.
for the `deflate` and `gzip` encodings, while the brotli decoding library is
required for the `br` encoding.
## The libcurl interface
@ -673,45 +683,45 @@ Content Encoding
[`curl_easy_setopt`][1](curl, [`CURLOPT_ACCEPT_ENCODING`][5], string)
where string is the intended value of the Accept-Encoding header.
where string is the intended value of the `Accept-Encoding` header.
Currently, libcurl does support multiple encodings but only
understands how to process responses that use the "deflate", "gzip" and/or
"br" content encodings, so the only values for [`CURLOPT_ACCEPT_ENCODING`][5]
that will work (besides "identity," which does nothing) are "deflate",
"gzip" and "br". If a response is encoded using the "compress" or methods,
understands how to process responses that use the `deflate`, `gzip` and/or
`br` content encodings, so the only values for [`CURLOPT_ACCEPT_ENCODING`][5]
that will work (besides `identity`, which does nothing) are `deflate`,
`gzip` and `br`. If a response is encoded using `compress` or another
unsupported method,
libcurl will return an error indicating that the response could
not be decoded. If `<string>` is NULL no Accept-Encoding header is generated.
If `<string>` is a zero-length string, then an Accept-Encoding header
containing all supported encodings will be generated.
not be decoded. If `<string>` is NULL no `Accept-Encoding` header is
generated. If `<string>` is a zero-length string, then an `Accept-Encoding`
header containing all supported encodings will be generated.
The [`CURLOPT_ACCEPT_ENCODING`][5] must be set to any non-NULL value for
content to be automatically decoded. If it is not set and the server still
sends encoded content (despite not having been asked), the data is returned
in its raw form and the Content-Encoding type is not checked.
in its raw form and the `Content-Encoding` type is not checked.
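For illustration, a minimal sketch (the URL is just an example) of asking
libcurl to negotiate and automatically decode compressed responses:

    #include <curl/curl.h>

    int main(void)
    {
      CURL *curl = curl_easy_init();
      if(curl) {
        curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/");

        /* "" offers all encodings this libcurl build supports and enables
           automatic decoding; NULL would suppress the header entirely */
        curl_easy_setopt(curl, CURLOPT_ACCEPT_ENCODING, "");

        curl_easy_perform(curl);
        curl_easy_cleanup(curl);
      }
      return 0;
    }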
## The curl interface
Use the [--compressed][6] option with curl to cause it to ask servers to
Use the [`--compressed`][6] option with curl to cause it to ask servers to
compress responses using any format supported by curl.
<a name="hostip"></a>
hostip.c explained
==================
`hostip.c` explained
====================
The main compile-time defines to keep in mind when reading the host*.c source
file are these:
The main compile-time defines to keep in mind when reading the `host*.c`
source file are these:
## `CURLRES_IPV6`
this host has getaddrinfo() and family, and thus we use that. The host may
this host has `getaddrinfo()` and family, and thus we use that. The host may
not be able to resolve IPv6, but we don't really have to take that into
account. Hosts that aren't IPv6-enabled have `CURLRES_IPV4` defined.
## `CURLRES_ARES`
is defined if libcurl is built to use c-ares for asynchronous name
resolves. This can be Windows or *nix.
resolves. This can be Windows or \*nix.
## `CURLRES_THREADED`
@ -724,20 +734,20 @@ hostip.c explained
libcurl is not built to use an asynchronous resolver, `CURLRES_SYNCH` is
defined.
## host*.c sources
## `host*.c` sources
The host*.c sources files are split up like this:
The `host*.c` source files are split up like this:
- hostip.c - method-independent resolver functions and utility functions
- hostasyn.c - functions for asynchronous name resolves
- hostsyn.c - functions for synchronous name resolves
- asyn-ares.c - functions for asynchronous name resolves using c-ares
- asyn-thread.c - functions for asynchronous name resolves using threads
- hostip4.c - IPv4 specific functions
- hostip6.c - IPv6 specific functions
- `hostip.c` - method-independent resolver functions and utility functions
- `hostasyn.c` - functions for asynchronous name resolves
- `hostsyn.c` - functions for synchronous name resolves
- `asyn-ares.c` - functions for asynchronous name resolves using c-ares
- `asyn-thread.c` - functions for asynchronous name resolves using threads
- `hostip4.c` - IPv4 specific functions
- `hostip6.c` - IPv6 specific functions
The hostip.h is the single united header file for all this. It defines the
`CURLRES_*` defines based on the config*.h and `curl_setup.h` defines.
`hostip.h` is the single united header file for all of this. It defines the
`CURLRES_*` defines based on the `config*.h` and `curl_setup.h` defines.
<a name="memoryleak"></a>
Track Down Memory Leaks
@ -749,14 +759,13 @@ Track Down Memory Leaks
than one thread. If you want/need to use it in a multi-threaded app, please
adjust accordingly.
## Build
Rebuild libcurl with -DCURLDEBUG (usually, rerunning configure with
--enable-debug fixes this). 'make clean' first, then 'make' so that all
Rebuild libcurl with `-DCURLDEBUG` (usually, rerunning configure with
`--enable-debug` fixes this). `make clean` first, then `make` so that all
files are actually rebuilt properly. It will also make sense to build
libcurl with the debug option (usually -g to the compiler) so that debugging
it will be easier if you actually do find a leak in the library.
libcurl with the debug option (usually `-g` to the compiler) so that
debugging it will be easier if you actually do find a leak in the library.
This will create a library that has memory debugging enabled.
@ -764,7 +773,7 @@ Track Down Memory Leaks
Add a line in your application code:
`curl_memdebug("dump");`
`curl_dbg_memdebug("dump");`
This will make the malloc debug system output a full trace of all resource
using functions to the given file name. Make sure you rebuild your program
@ -780,7 +789,7 @@ Track Down Memory Leaks
## Analyze the Flow
Use the tests/memanalyze.pl perl script to analyze the dump file:
Use the `tests/memanalyze.pl` perl script to analyze the dump file:
tests/memanalyze.pl dump
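Putting the steps together, a rough sketch of an instrumented test program,
assuming a libcurl built with `-DCURLDEBUG` (`curl_dbg_memdebug()` is not
declared in the public headers, so the prototype below is added by hand and
is an assumption about the debug build):

    #include <curl/curl.h>

    /* only exists in a libcurl built with -DCURLDEBUG; not a public API */
    extern void curl_dbg_memdebug(const char *logname);

    int main(void)
    {
      CURL *curl;

      curl_dbg_memdebug("dump");   /* log all resource usage to "dump" */

      curl_global_init(CURL_GLOBAL_ALL);
      curl = curl_easy_init();
      if(curl) {
        curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/");
        curl_easy_perform(curl);
        curl_easy_cleanup(curl);
      }
      curl_global_cleanup();
      return 0;   /* afterwards: tests/memanalyze.pl dump */
    }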
@ -796,45 +805,46 @@ Track Down Memory Leaks
Implementation of the `curl_multi_socket` API
The main ideas of this API are simply:
1 - The application can use whatever event system it likes as it gets info
from libcurl about what file descriptors libcurl waits for what action
on. (The previous API returns `fd_sets` which is very select()-centric).
1. The application can use whatever event system it likes as it gets info
from libcurl about what file descriptors libcurl waits for what action
on. (The previous API returns `fd_sets` which is very
`select()`-centric).
2 - When the application discovers action on a single socket, it calls
libcurl and informs that there was action on this particular socket and
libcurl can then act on that socket/transfer only and not care about
any other transfers. (The previous API always had to scan through all
the existing transfers.)
2. When the application discovers action on a single socket, it calls
libcurl and informs that there was action on this particular socket and
libcurl can then act on that socket/transfer only and not care about
any other transfers. (The previous API always had to scan through all
the existing transfers.)
The idea is that [`curl_multi_socket_action()`][7] calls a given callback
with information about what socket to wait for what action on, and the
callback only gets called if the status of that socket has changed.
We also added a timer callback that makes libcurl call the application when
the timeout value changes, and you set that with [`curl_multi_setopt()`][9]
and the [`CURLMOPT_TIMERFUNCTION`][10] option. To make this work, there is
internally an added struct to each easy handle in which we store an "expire
time" (if any). The structs are then "splay sorted" so that we can add and
remove times from the linked list and yet somewhat swiftly figure out both
how long there is until the next nearest timer expires and which timer
(handle) we should take care of now. Of course, the upside of all this is
that we get a [`curl_multi_timeout()`][8] that should also work with
old-style applications that use [`curl_multi_perform()`][11].
We created an internal "socket to easy handles" hash table that given
a socket (file descriptor) returns the easy handle that waits for action on
that socket. This hash is made using the already existing hash code
(previously only used for the DNS cache).
To make libcurl able to report plain sockets in the socket callback, we had
to re-organize the internals of the [`curl_multi_fdset()`][12] etc so that
the conversion from sockets to `fd_sets` for that function is only done in
the last step before the data is returned. I also had to extend c-ares to
get a function that can return plain sockets, as that library too returned
only `fd_sets` and that is no longer good enough. The changes done to c-ares
are available in c-ares 1.3.1 and later.
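To make the callback plumbing concrete, here is a heavily trimmed sketch; the
callback names are made up and the actual event loop (which would call
[`curl_multi_socket_action()`][7] whenever a watched socket or the timer
fires) is left out:

    #include <curl/curl.h>

    /* libcurl tells us which socket to watch for which action */
    static int sock_cb(CURL *easy, curl_socket_t s, int what, void *userp,
                       void *socketp)
    {
      /* 'what' is CURL_POLL_IN, CURL_POLL_OUT, CURL_POLL_INOUT or
         CURL_POLL_REMOVE: (un)register 's' with the event system here */
      (void)easy; (void)s; (void)what; (void)userp; (void)socketp;
      return 0;
    }

    /* libcurl tells us when its nearest timeout changes */
    static int timer_cb(CURLM *multi, long timeout_ms, void *userp)
    {
      /* (re)arm a single timer; when it fires, call
         curl_multi_socket_action(multi, CURL_SOCKET_TIMEOUT, 0, &running) */
      (void)multi; (void)timeout_ms; (void)userp;
      return 0;
    }

    int main(void)
    {
      CURLM *multi = curl_multi_init();
      int running = 0;

      curl_multi_setopt(multi, CURLMOPT_SOCKETFUNCTION, sock_cb);
      curl_multi_setopt(multi, CURLMOPT_TIMERFUNCTION, timer_cb);

      /* add easy handles with curl_multi_add_handle(), then kick off: */
      curl_multi_socket_action(multi, CURL_SOCKET_TIMEOUT, 0, &running);

      curl_multi_cleanup(multi);
      return 0;
    }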
<a name="structs"></a>
Structs in libcurl
@ -843,40 +853,42 @@ Structs in libcurl
This section should cover 7.32.0 pretty accurately, but will make sense even
for older and later versions as things don't change drastically that often.
<a name="Curl_easy"></a>
## Curl_easy
The `Curl_easy` struct is the one returned to the outside in the external API
as a "CURL *". This is usually known as an easy handle in API documentations
as a `CURL *`. This is usually known as an easy handle in API documentation
and examples.
Information and state that is related to the actual connection is in the
'connectdata' struct. When a transfer is about to be made, libcurl will
`connectdata` struct. When a transfer is about to be made, libcurl will
either create a new connection or re-use an existing one. The particular
connectdata that is used by this handle is pointed out by
`Curl_easy->easy_conn`.
Data and information regarding this particular single transfer is put in
the SingleRequest sub-struct.
the `SingleRequest` sub-struct.
When the `Curl_easy` struct is added to a multi handle, as it must be in
order to do any transfer, the ->multi member will point to the `Curl_multi`
struct it belongs to. The ->prev and ->next members will then be used by the
multi code to keep a linked list of `Curl_easy` structs that are added to
that same multi handle. libcurl always uses multi so ->multi *will* point to
a `Curl_multi` when a transfer is in progress.
order to do any transfer, the `->multi` member will point to the `Curl_multi`
struct it belongs to. The `->prev` and `->next` members will then be used by
the multi code to keep a linked list of `Curl_easy` structs that are added to
that same multi handle. libcurl always uses multi so `->multi` *will* point
to a `Curl_multi` when a transfer is in progress.
->mstate is the multi state of this particular `Curl_easy`. When
`->mstate` is the multi state of this particular `Curl_easy`. When
`multi_runsingle()` is called, it will act on this handle according to which
state it is in. The mstate is also what tells which sockets to return for a
specific `Curl_easy` when [`curl_multi_fdset()`][12] is called etc.
The libcurl source code generally use the name 'data' for the variable that
The libcurl source code generally uses the name `data` for the variable that
points to the `Curl_easy`.
When doing multiplexed HTTP/2 transfers, each `Curl_easy` is associated with
an individual stream, sharing the same connectdata struct. Multiplexing
makes it even more important to keep things associated with the right thing!
<a name="connectdata"></a>
## connectdata
A general idea in libcurl is to keep connections around in a connection
@ -884,16 +896,16 @@ for older and later versions as things don't change drastically that often.
re-use an existing one instead of creating a new one, as that gives a significant
performance boost.
Each 'connectdata' identifies a single physical connection to a server. If
Each `connectdata` identifies a single physical connection to a server. If
the connection can't be kept alive, the connection will be closed after use
and then this struct can be removed from the cache and freed.
Thus, the same `Curl_easy` can be used multiple times and each time select
another connectdata struct to use for the connection. Keep this in mind, as
it is then important to consider if options or choices are based on the
another `connectdata` struct to use for the connection. Keep this in mind,
as it is then important to consider if options or choices are based on the
connection or the `Curl_easy`.
Functions in libcurl will assume that connectdata->data points to the
Functions in libcurl will assume that `connectdata->data` points to the
`Curl_easy` that uses this connection (for the moment).
As a special complexity, some protocols supported by libcurl require a
@ -908,15 +920,16 @@ for older and later versions as things don't change drastically that often.
this single struct and thus can be considered a single connection for most
internal concerns.
The libcurl source code generally use the name 'conn' for the variable that
The libcurl source code generally uses the name `conn` for the variable that
points to the connectdata.
<a name="Curl_multi"></a>
## Curl_multi
Internally, the easy interface is implemented as a wrapper around multi
interface functions. This makes everything multi interface.
`Curl_multi` is the multi handle struct exposed as "CURLM *" in external
`Curl_multi` is the multi handle struct exposed as `CURLM *` in external
APIs.
This struct holds a list of `Curl_easy` structs that have been added to this
@ -943,18 +956,19 @@ for older and later versions as things don't change drastically that often.
`->conn_cache` points to the connection cache. It keeps track of all
connections that are kept after use. The cache has a maximum size.
`->closure_handle` is described in the 'connectdata' section.
`->closure_handle` is described in the `connectdata` section.
The libcurl source code generally use the name 'multi' for the variable that
The libcurl source code generally uses the name `multi` for the variable that
points to the `Curl_multi` struct.
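As a point of reference, a bare-bones sketch (error checking omitted, the URL
is just an example) of driving a single transfer through the multi interface
directly, which is roughly what the easy interface does internally:

    #include <curl/curl.h>

    int main(void)
    {
      CURLM *multi = curl_multi_init();
      CURL *easy = curl_easy_init();
      int still_running = 0;

      curl_easy_setopt(easy, CURLOPT_URL, "https://example.com/");
      curl_multi_add_handle(multi, easy);

      /* drive the transfer until it is done */
      do {
        curl_multi_perform(multi, &still_running);
        if(still_running)
          curl_multi_wait(multi, NULL, 0, 1000, NULL);
      } while(still_running);

      curl_multi_remove_handle(multi, easy);
      curl_easy_cleanup(easy);
      curl_multi_cleanup(multi);
      return 0;
    }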
<a name="Curl_handler"></a>
## Curl_handler
Each unique protocol that is supported by libcurl needs to provide at least
one `Curl_handler` struct. It defines what the protocol is called and what
functions the main code should call to deal with protocol specific issues.
In general, there's a source file named [protocol].c in which there's a
"struct `Curl_handler` `Curl_handler_[protocol]`" declared. In url.c there's
In general, there's a source file named `[protocol].c` in which there's a
`struct Curl_handler Curl_handler_[protocol]` declared. In `url.c` there's
then the main array with all individual `Curl_handler` structs pointed to
from a single array which is scanned through when a URL is given to libcurl
to work with.
@ -966,9 +980,9 @@ for older and later versions as things don't change drastically that often.
`->setup_connection` is called to allow the protocol code to allocate
protocol specific data that then gets associated with that `Curl_easy` for
the rest of this transfer. It gets freed again at the end of the transfer.
It will be called before the 'connectdata' for the transfer has been
It will be called before the `connectdata` for the transfer has been
selected/created. Most protocols will allocate their private
'struct [PROTOCOL]' here and assign `Curl_easy->req.protop` to point to it.
`struct [PROTOCOL]` here and assign `Curl_easy->req.protop` to point to it.
`->connect_it` allows a protocol to do some specific actions after the TCP
connect is done, that can still be considered part of the connection phase.
@ -995,25 +1009,25 @@ for older and later versions as things don't change drastically that often.
`->do_more` gets called during the `DO_MORE` state. The FTP protocol uses
this state when setting up the second connection.
->`proto_getsock`
->`doing_getsock`
->`domore_getsock`
->`perform_getsock`
`->proto_getsock`
`->doing_getsock`
`->domore_getsock`
`->perform_getsock`
Functions that return socket information. Which socket(s) to wait for which
action(s) during the particular multi state.
->disconnect is called immediately before the TCP connection is shutdown.
`->disconnect` is called immediately before the TCP connection is shut down.
->readwrite gets called during transfer to allow the protocol to do extra
`->readwrite` gets called during transfer to allow the protocol to do extra
reads/writes
->defport is the default report TCP or UDP port this protocol uses
`->defport` is the default TCP or UDP port this protocol uses
->protocol is one or more bits in the `CURLPROTO_*` set. The SSL versions
`->protocol` is one or more bits in the `CURLPROTO_*` set. The SSL versions
have their "base" protocol set and then the SSL variation. Like
"HTTP|HTTPS".
->flags is a bitmask with additional information about the protocol that will
`->flags` is a bitmask with additional information about the protocol that will
make it get treated differently by the generic engine:
- `PROTOPT_SSL` - will make it connect and negotiate SSL
@ -1028,7 +1042,7 @@ for older and later versions as things don't change drastically that often.
limit which "direction" of socket actions that the main engine will
concern itself with.
- `PROTOPT_NONETWORK` - a protocol that doesn't use network (read file:)
- `PROTOPT_NONETWORK` - a protocol that doesn't use network (read `file:`)
- `PROTOPT_NEEDSPWD` - this protocol needs a password and will use a default
one unless one is provided
@ -1036,16 +1050,18 @@ for older and later versions as things don't change drastically that often.
- `PROTOPT_NOURLQUERY` - this protocol can't handle a query part on the URL
(?foo=bar)
<a name="conncache"></a>
## conncache
Is a hash table with connections for later re-use. Each `Curl_easy` has a
pointer to its connection cache. Each multi handle sets up a connection
cache that all added `Curl_easy`s share by default.
<a name="Curl_share"></a>
## Curl_share
The libcurl share API allocates a `Curl_share` struct, exposed to the
external API as "CURLSH *".
external API as `CURLSH *`.
The idea is that the struct can have a set of its own versions of caches and
pools and then by providing this struct in the `CURLOPT_SHARE` option, those
@ -1058,10 +1074,11 @@ for older and later versions as things don't change drastically that often.
The `Curl_share` struct can currently hold cookies, DNS cache and the SSL
session cache.
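From the application side this is all driven through the public share API. A
minimal sketch (the URL is just an example, and the lock/unlock callbacks a
real multi-threaded program needs are omitted):

    #include <curl/curl.h>

    int main(void)
    {
      CURLSH *share = curl_share_init();
      CURL *curl;

      /* pick what the handles should share */
      curl_share_setopt(share, CURLSHOPT_SHARE, CURL_LOCK_DATA_COOKIE);
      curl_share_setopt(share, CURLSHOPT_SHARE, CURL_LOCK_DATA_DNS);
      curl_share_setopt(share, CURLSHOPT_SHARE, CURL_LOCK_DATA_SSL_SESSION);

      curl = curl_easy_init();
      if(curl) {
        /* this handle now uses the shared caches */
        curl_easy_setopt(curl, CURLOPT_SHARE, share);
        curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/");
        curl_easy_perform(curl);
        curl_easy_cleanup(curl);
      }
      curl_share_cleanup(share);
      return 0;
    }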
<a name="CookieInfo"></a>
## CookieInfo
This is the main cookie struct. It holds all known cookies and related
information. Each `Curl_easy` has its own private CookieInfo even when
information. Each `Curl_easy` has its own private `CookieInfo` even when
they are added to a multi handle. They can be made to share cookies by using
the share API.

View file

@ -12,14 +12,13 @@ check the changelog of the current development status, as one or more of these
problems may have been fixed or changed somewhat since this was written!
1. HTTP
1.1 CURLFORM_CONTENTLEN in an array
1.2 Disabling HTTP Pipelining
1.3 STARTTRANSFER time is wrong for HTTP POSTs
1.4 multipart formposts file name encoding
1.5 Expect-100 meets 417
1.6 Unnecessary close when 401 received waiting for 100
1.7 Deflate error after all content was received
1.8 DoH isn't used for all name resolves when enabled
1.9 HTTP/2 frames while in the connection pool kill reuse
1.10 Strips trailing dot from host name
1.11 CURLOPT_SEEKFUNCTION not called with CURLFORM_STREAM
2. TLS
@ -29,6 +28,8 @@ problems may have been fixed or changed somewhat since this was written!
2.4 DarwinSSL won't import PKCS#12 client certificates without a password
2.5 Client cert handling with Issuer DN differs between backends
2.6 CURL_GLOBAL_SSL
2.7 Client cert (MTLS) issues with Schannel
2.8 Schannel disable CURLOPT_SSL_VERIFYPEER and verify hostname
3. Email protocols
3.1 IMAP SEARCH ALL truncated response
@ -41,14 +42,19 @@ problems may have been fixed or changed somewhat since this was written!
4.2 -J with -C - fails
4.3 --retry and transfer timeouts
4.4 --upload-file . hang if delay in STDIN
4.5 Improve --data-urlencode space encoding
5. Build and portability issues
5.1 USE_UNIX_SOCKETS on Windows
5.2 curl-config --libs contains private details
5.3 curl compiled on OSX 10.13 failed to run on OSX 10.10
5.4 Cannot compile against a static build of OpenLDAP
5.5 can't handle Unicode arguments in Windows
5.6 cmake support gaps
5.7 Visual Studio project gaps
5.8 configure finding libs in wrong directory
5.9 Utilize Requires.private directives in libcurl.pc
5.10 IDN tests failing on Windows / MSYS2
6. Authentication
6.1 NTLM authentication and unicode
@ -57,6 +63,7 @@ problems may have been fixed or changed somewhat since this was written!
6.4 Negotiate and Kerberos V5 need a fake user name
6.5 NTLM doesn't support password with § character
6.6 libcurl can fail to try alternatives with --proxy-any
6.7 Don't clear digest for single realm
7. FTP
7.1 FTP without or slow 220 response
@ -90,9 +97,11 @@ problems may have been fixed or changed somewhat since this was written!
11.4 HTTP test server 'connection-monitor' problems
11.5 Connection information when using TCP Fast Open
11.6 slow connect to localhost on Windows
11.7 signal-based resolver timeouts
12. LDAP and OpenLDAP
12.1 OpenLDAP hangs after returning results
12.2 LDAP on Windows does authentication wrong?
13. TCP/IP
13.1 --interface for ipv6 binds to unusable IP address
@ -104,23 +113,6 @@ problems may have been fixed or changed somewhat since this was written!
1. HTTP
1.1 CURLFORM_CONTENTLEN in an array
It is not possible to pass a 64-bit value using CURLFORM_CONTENTLEN with
CURLFORM_ARRAY, when compiled on 32-bit platforms that support 64-bit
integers. This is because the underlying structure 'curl_forms' uses a dual
purpose char* for storing these values in via casting. For more information
see the now closed related issue:
https://github.com/curl/curl/issues/608
1.2 Disabling HTTP Pipelining
Disabling HTTP Pipelining when there are ongoing transfers can lead to
heap corruption and crash. https://curl.haxx.se/bug/view.cgi?id=1411
Similarly, removing a handle when pipelining corrupts data:
https://github.com/curl/curl/issues/2101
1.3 STARTTRANSFER time is wrong for HTTP POSTs
Wrong STARTTRANSFER timer accounting for POST requests. Timer works fine with
@ -152,6 +144,25 @@ problems may have been fixed or changed somewhat since this was written!
waiting for the 100-continue response.
https://curl.haxx.se/mail/lib-2008-08/0462.html
1.7 Deflate error after all content was received
There's a situation where we can get an error in an HTTP response that is
compressed, when that error is detected after all the actual body contents
have been received and delivered to the application. This is tricky, but it
is ultimately the result of a broken server.
See https://github.com/curl/curl/issues/2719
1.8 DoH isn't used for all name resolves when enabled
Even if DoH is specified to be used, there are some name resolves that are
done without it. This should be fixed. When the internal function
`Curl_resolver_wait_resolv()` is called, it doesn't use DoH to complete the
resolve as it otherwise should.
See https://github.com/curl/curl/pull/3857 and
https://github.com/curl/curl/pull/3850
1.9 HTTP/2 frames while in the connection pool kill reuse
If the server sends HTTP/2 frames (like for example an HTTP/2 PING frame) to
@ -162,42 +173,6 @@ problems may have been fixed or changed somewhat since this was written!
This is *best* fixed by adding monitoring to connections while they are kept
in the pool so that pings can be responded to appropriately.
1.10 Strips trailing dot from host name
When given a URL with a trailing dot for the host name part:
"https://example.com./", libcurl will strip off the dot and use the name
without a dot internally and send it dot-less in HTTP Host: headers and in
the TLS SNI field. For the purpose of resolving the name to an address
the hostname is used as is without any change.
The HTTP part violates RFC 7230 section 5.4 but the SNI part is accordance
with RFC 6066 section 3.
URLs using these trailing dots are very rare in the wild and we have not seen
or gotten any real-world problems with such URLs reported. The popular
browsers seem to have stayed with not stripping the dot for both uses (thus
they violate RFC 6066 instead of RFC 7230).
Daniel took the discussion to the HTTPbis mailing list in March 2016:
https://lists.w3.org/Archives/Public/ietf-http-wg/2016JanMar/0430.html but
there was not major rush or interest to fix this. The impression I get is
that most HTTP people rather not rock the boat now and instead prioritize web
compatibility rather than to strictly adhere to these RFCs.
Our current approach allows a knowing client to send a custom HTTP header
with the dot added.
In a few cases there is a difference in name resolving to IP addresses with
a trailing dot, but it can be noted that many HTTP servers will not happily
accept the trailing dot there unless that has been specifically configured
to be a fine virtual host.
If URLs with trailing dots for host names become more popular or even just
used more than for just plain fun experiments, I'm sure we will have reason
to go back and reconsider.
See https://github.com/curl/curl/issues/716 for the discussion.
1.11 CURLOPT_SEEKFUNCTION not called with CURLFORM_STREAM
I'm using libcurl to POST form data using a FILE* with the CURLFORM_STREAM
@ -270,6 +245,15 @@ problems may have been fixed or changed somewhat since this was written!
https://github.com/curl/curl/issues/2276
2.7 Client cert (MTLS) issues with Schannel
See https://github.com/curl/curl/issues/3145
2.8 Schannel disable CURLOPT_SSL_VERIFYPEER and verify hostname
This seems to be a limitation in the underlying Schannel API.
https://github.com/curl/curl/issues/3284
3. Email protocols
@ -344,14 +328,36 @@ problems may have been fixed or changed somewhat since this was written!
See https://github.com/curl/curl/issues/2051
4.5 Improve --data-urlencode space encoding
ASCII space characters in --data-urlencode are currently encoded as %20
rather than +, which RFC 1866 says should be used.
See https://github.com/curl/curl/issues/3229
5. Build and portability issues
5.1 USE_UNIX_SOCKETS on Windows
Due to incorrect CMake checks for the presence of the feature, it will never
be enabled for Windows in a cmake build.
See https://github.com/curl/curl/issues/4040
5.2 curl-config --libs contains private details
"curl-config --libs" will include details set in LDFLAGS when configure is
run that might be needed only for building libcurl. Further, curl-config
--cflags suffers from the same effects with CFLAGS/CPPFLAGS.
5.3 curl compiled on OSX 10.13 failed to run on OSX 10.10
See https://github.com/curl/curl/issues/2905
5.4 Cannot compile against a static build of OpenLDAP
See https://github.com/curl/curl/issues/2367
5.5 can't handle Unicode arguments in Windows
If a URL or filename can't be encoded using the user's current codepage then
@ -369,10 +375,16 @@ problems may have been fixed or changed somewhat since this was written!
offers. This includes:
- use of correct soname for the shared library build
- support for several TLS backends is missing
- the unit tests cause link failures in regular non-static builds
- no nghttp2 check
- unusable tool_hugehelp.c with MinGW, see
https://github.com/curl/curl/issues/3125
5.7 Visual Studio project gaps
The Visual Studio projects lack some features that the autoconf and nmake
@ -408,6 +420,13 @@ problems may have been fixed or changed somewhat since this was written!
https://github.com/curl/curl/issues/864
5.10 IDN tests failing on Windows / MSYS2
It seems like MSYS2 does some UTF-8-to-something-else conversion for Windows
compatibility.
https://github.com/curl/curl/issues/3747
6. Authentication
6.1 NTLM authentication and unicode
@ -460,6 +479,10 @@ problems may have been fixed or changed somewhat since this was written!
https://github.com/curl/curl/issues/876
6.7 Don't clear digest for single realm
https://github.com/curl/curl/issues/3267
7. FTP
7.1 FTP without or slow 220 response
@ -667,6 +690,19 @@ problems may have been fixed or changed somewhat since this was written!
https://github.com/curl/curl/issues/2281
11.7 signal-based resolver timeouts
libcurl built without an asynchronous resolver library uses alarm() to time
out DNS lookups. When a timeout occurs, this causes libcurl to jump from the
signal handler back into the library with a sigsetjmp, which effectively
causes libcurl to continue running within the signal handler. This is
non-portable and could cause problems on some platforms. A discussion on the
problem is available at https://curl.haxx.se/mail/lib-2008-09/0197.html
Also, alarm() provides timeout resolution only to the nearest second; alarm()
ought to be replaced by setitimer() on systems that support it.
12. LDAP and OpenLDAP
12.1 OpenLDAP hangs after returning results
@ -688,6 +724,9 @@ problems may have been fixed or changed somewhat since this was written!
See https://github.com/curl/curl/issues/622 and
https://curl.haxx.se/mail/lib-2016-01/0101.html
12.2 LDAP on Windows does authentication wrong?
https://github.com/curl/curl/issues/3116
13. TCP/IP

File diff suppressed because it is too large

View file

@ -5,7 +5,7 @@
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) 1998 - 2018, Daniel Stenberg, <daniel@haxx.se>, et al.
# Copyright (C) 1998 - 2019, Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
@ -42,7 +42,9 @@ CLEANFILES = $(GENHTMLPAGES) $(PDFPAGES) $(MANDISTPAGES) curl.1
EXTRA_DIST = \
$(noinst_man_MANS) \
ALTSVC.md \
BINDINGS.md \
BUG-BOUNTY.md \
BUGS \
CHECKSRC.md \
CIPHERS.md \
@ -51,6 +53,8 @@ EXTRA_DIST = \
CODE_STYLE.md \
CONTRIBUTE.md \
DEPRECATE.md \
ESNI.md \
EXPERIMENTAL.md \
FAQ \
FEATURES \
GOVERNANCE.md \
@ -58,6 +62,7 @@ EXTRA_DIST = \
HISTORY.md \
HTTP-COOKIES.md \
HTTP2.md \
HTTP3.md \
INSTALL \
INSTALL.cmake \
INSTALL.md \
@ -65,7 +70,7 @@ EXTRA_DIST = \
KNOWN_BUGS \
LICENSE-MIXING.md \
MAIL-ETIQUETTE \
MANUAL \
PARALLEL-TRANSFERS.md \
README.cmake \
README.md \
README.netware \
@ -95,7 +100,7 @@ SUFFIXES = .1 .html .pdf
# have changed.
$(abs_builddir)/curl.1:
if test "$(top_builddir)x" != "$(top_srcdir)x" -a -e "$(srcdir)/curl.1"; then \
cp -fp "$(srcdir)/curl.1" $@; fi
$(INSTALL_DATA) "$(srcdir)/curl.1" $@; fi
cd cmdline-opts && $(MAKE)
html: $(HTMLPAGES)

View file

@ -21,7 +21,7 @@
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) 1998 - 2018, Daniel Stenberg, <daniel@haxx.se>, et al.
# Copyright (C) 1998 - 2019, Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
@ -111,8 +111,7 @@ build_triplet = @build@
host_triplet = @host@
subdir = docs
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
am__aclocal_m4_deps = $(top_srcdir)/m4/ax_code_coverage.m4 \
$(top_srcdir)/m4/ax_compile_check_sizeof.m4 \
am__aclocal_m4_deps = $(top_srcdir)/m4/ax_compile_check_sizeof.m4 \
$(top_srcdir)/m4/curl-compilers.m4 \
$(top_srcdir)/m4/curl-confopts.m4 \
$(top_srcdir)/m4/curl-functions.m4 \
@ -266,12 +265,6 @@ CC = @CC@
CCDEPMODE = @CCDEPMODE@
CFLAGS = @CFLAGS@
CFLAG_CURL_SYMBOL_HIDING = @CFLAG_CURL_SYMBOL_HIDING@
CODE_COVERAGE_CFLAGS = @CODE_COVERAGE_CFLAGS@
CODE_COVERAGE_CPPFLAGS = @CODE_COVERAGE_CPPFLAGS@
CODE_COVERAGE_CXXFLAGS = @CODE_COVERAGE_CXXFLAGS@
CODE_COVERAGE_ENABLED = @CODE_COVERAGE_ENABLED@
CODE_COVERAGE_LDFLAGS = @CODE_COVERAGE_LDFLAGS@
CODE_COVERAGE_LIBS = @CODE_COVERAGE_LIBS@
CONFIGURE_OPTIONS = @CONFIGURE_OPTIONS@
CPP = @CPP@
CPPFLAGS = @CPPFLAGS@
@ -313,14 +306,15 @@ ENABLE_SHARED = @ENABLE_SHARED@
ENABLE_STATIC = @ENABLE_STATIC@
EXEEXT = @EXEEXT@
FGREP = @FGREP@
FISH_FUNCTIONS_DIR = @FISH_FUNCTIONS_DIR@
GCOV = @GCOV@
GENHTML = @GENHTML@
GREP = @GREP@
HAVE_BROTLI = @HAVE_BROTLI@
HAVE_GNUTLS_SRP = @HAVE_GNUTLS_SRP@
HAVE_LDAP_SSL = @HAVE_LDAP_SSL@
HAVE_LIBZ = @HAVE_LIBZ@
HAVE_OPENSSL_SRP = @HAVE_OPENSSL_SRP@
HAVE_PROTO_BSDSOCKET_H = @HAVE_PROTO_BSDSOCKET_H@
IDN_ENABLED = @IDN_ENABLED@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
@ -381,8 +375,6 @@ STRIP = @STRIP@
SUPPORT_FEATURES = @SUPPORT_FEATURES@
SUPPORT_PROTOCOLS = @SUPPORT_PROTOCOLS@
USE_ARES = @USE_ARES@
USE_CYASSL = @USE_CYASSL@
USE_DARWINSSL = @USE_DARWINSSL@
USE_GNUTLS = @USE_GNUTLS@
USE_GNUTLS_NETTLE = @USE_GNUTLS_NETTLE@
USE_LIBRTMP = @USE_LIBRTMP@
@ -391,12 +383,17 @@ USE_LIBSSH2 = @USE_LIBSSH2@
USE_MBEDTLS = @USE_MBEDTLS@
USE_MESALINK = @USE_MESALINK@
USE_NGHTTP2 = @USE_NGHTTP2@
USE_NGHTTP3 = @USE_NGHTTP3@
USE_NGTCP2 = @USE_NGTCP2@
USE_NGTCP2_CRYPTO_OPENSSL = @USE_NGTCP2_CRYPTO_OPENSSL@
USE_NSS = @USE_NSS@
USE_OPENLDAP = @USE_OPENLDAP@
USE_POLARSSL = @USE_POLARSSL@
USE_QUICHE = @USE_QUICHE@
USE_SCHANNEL = @USE_SCHANNEL@
USE_SECTRANSP = @USE_SECTRANSP@
USE_UNIX_SOCKETS = @USE_UNIX_SOCKETS@
USE_WINDOWS_SSPI = @USE_WINDOWS_SSPI@
USE_WOLFSSL = @USE_WOLFSSL@
VERSION = @VERSION@
VERSIONNUM = @VERSIONNUM@
ZLIB_LIBS = @ZLIB_LIBS@
@ -475,7 +472,9 @@ DIST_SUBDIRS = $(SUBDIRS) examples libcurl
CLEANFILES = $(GENHTMLPAGES) $(PDFPAGES) $(MANDISTPAGES) curl.1
EXTRA_DIST = \
$(noinst_man_MANS) \
ALTSVC.md \
BINDINGS.md \
BUG-BOUNTY.md \
BUGS \
CHECKSRC.md \
CIPHERS.md \
@ -484,6 +483,8 @@ EXTRA_DIST = \
CODE_STYLE.md \
CONTRIBUTE.md \
DEPRECATE.md \
ESNI.md \
EXPERIMENTAL.md \
FAQ \
FEATURES \
GOVERNANCE.md \
@ -491,6 +492,7 @@ EXTRA_DIST = \
HISTORY.md \
HTTP-COOKIES.md \
HTTP2.md \
HTTP3.md \
INSTALL \
INSTALL.cmake \
INSTALL.md \
@ -498,7 +500,7 @@ EXTRA_DIST = \
KNOWN_BUGS \
LICENSE-MIXING.md \
MAIL-ETIQUETTE \
MANUAL \
PARALLEL-TRANSFERS.md \
README.cmake \
README.md \
README.netware \
@ -886,7 +888,7 @@ uninstall-man: uninstall-man1
# have changed.
$(abs_builddir)/curl.1:
if test "$(top_builddir)x" != "$(top_srcdir)x" -a -e "$(srcdir)/curl.1"; then \
cp -fp "$(srcdir)/curl.1" $@; fi
$(INSTALL_DATA) "$(srcdir)/curl.1" $@; fi
cd cmdline-opts && $(MAKE)
html: $(HTMLPAGES)

View file

@ -0,0 +1,58 @@
# Parallel transfers
curl 7.66.0 introduces support for doing multiple transfers simultaneously, in
parallel.
## -Z, --parallel
When this command line option is used, curl will perform the transfers given
to it at the same time. It will do up to `--parallel-max` concurrent
transfers, with a default value of 50.
## Progress meter
The progress meter that is displayed when doing parallel transfers is
completely different from the regular one used for each single transfer.
It shows:
o percent download (if known, which means *all* transfers need to have a
known size)
o percent upload (if known, with the same caveat as for download)
o total amount of downloaded data
o total amount of uploaded data
o number of transfers to perform
o number of concurrent transfers being transferred right now
o number of transfers queued up waiting to start
o total time all transfers are expected to take (if sizes are known)
o current time the transfers have spent so far
o estimated time left (if sizes are known)
o current transfer speed (the faster of UL/DL speeds measured over the last
few seconds)
Example:
DL% UL% Dled Uled Xfers Live Qd Total Current Left Speed
72 -- 37.9G 0 101 30 23 0:00:55 0:00:34 0:00:22 2752M
## Behavior differences
Connections are shared fine between different easy handles, but the
"authentication contexts" are not. So for example doing HTTP Digest auth with
one handle for a particular transfer and then continue on with another handle
that reuses the same connection, the second handle can't send the necessary
Authorization header at once since the context is only kept in the original
easy handle.
To fix this, the authorization state could be made shareable with the share
API as well, basically as a context per origin + path (realm?).
Visible in tests 153, 1412 and more.
## Feedback!
This is early days for parallel transfer support. Keep your eyes open for
unintended side effects or downright bugs.
Tell us what you think and how you think we could improve this feature!

View file

@ -16,7 +16,7 @@ in the source code repo
- run "./maketgz 7.34.0" to build the release tarballs. It is important that
you run this on a machine with the correct set of autotools etc installed
as this is what then will be shipped and used by most users on *nix like
as this is what then will be shipped and used by most users on \*nix like
systems.
- push the git commits and the new tag
@ -84,9 +84,15 @@ Coming dates
Based on the description above, here are some planned release dates (at the
time of this writing):
- October 31, 2018
- December 12, 2018
- February 6, 2019
- April 3, 2019
- May 29, 2019
- July 24, 2019
- May 22, 2019
- July 17, 2019
- September 11, 2019
- November 6, 2019
- January 8, 2020 (moved)
- February 27, 2020
- April 22, 2020
- June 17, 2020
The above (and more) curl-related dates are published in
[iCalendar format](https://calendar.google.com/calendar/ical/c9u5d64odop9js55oltfarjk6g%40group.calendar.google.com/public/basic.ics)
as well.

View file

@ -5,38 +5,52 @@ Roadmap of things Daniel Stenberg wants to work on next. It is intended to
serve as a guideline for others for information, feedback and possible
participation.
QUIC
HSTS
----
See the [QUIC wiki page](https://github.com/curl/curl/wiki/QUIC).
Complete and merge [the existing PR](https://github.com/curl/curl/pull/2682).
HTTP cookies
Loading a huge preload file is probably not too interesting to most people,
but using a custom file and reacting to the HSTS response header probably are
good features.
DNS-over-TLS
------------
Two cookie drafts have been adopted by the httpwg in IETF and we should
support them as the popular browsers will as well:
Similar to DNS-over-HTTPS. Could share quite a lot of generic code.
[Deprecate modification of 'secure' cookies from non-secure
origins](https://tools.ietf.org/html/draft-ietf-httpbis-cookie-alone-00)
ESNI (Encrypted SNI)
--------------------
[Cookie Prefixes](https://tools.ietf.org/html/draft-ietf-httpbis-cookie-prefixes-00)
See Daniel's post on [Support of Encrypted
SNI](https://curl.haxx.se/mail/lib-2019-03/0000.html) on the mailing list.
[Firefox bug report about secure cookies](https://bugzilla.mozilla.org/show_bug.cgi?id=976073)
Initial work exists in https://github.com/curl/curl/pull/4011
SRV records
-----------
tiny-curl
---------
How to find services for specific domains/hosts.
There's no immediate action for this but users seem keen on being able to
build custom minimized versions of libcurl for their products. Make sure
new features that are "niche" can still be disabled at build-time.
Improve
-------
MQTT
----
1. curl -h output (considered overwhelming to users).
Support receiving and sending MQTT messages. Initial work exists in
https://github.com/curl/curl/pull/3514
2. We have > 200 command line options, is there a way to redo things to
simplify or improve the situation as we are likely to keep adding
features/options in the future too.
Hardcode “localhost”
--------------------
3. Perform some of the clean up from the TODO document, removing old
definitions and such like that are currently earmarked to be removed years
ago.
No need to resolve it. Avoid a risk where this is resolved over the network
and actually responds with something other than a local address. Some
operating systems already do this. Also:
https://tools.ietf.org/html/draft-ietf-dnsop-let-localhost-be-localhost-02
"menu config"-style build feature selection
-------------------------------------------
Allow easier building of custom libcurl versions with only a selected set of
features, where the available features are easily browsable and can be
toggled ON/OFF or similar.

View file

@ -10,9 +10,8 @@ Publishing Information
All known and public curl or libcurl related vulnerabilities are listed on
[the curl web site security page](https://curl.haxx.se/docs/security.html).
Security vulnerabilities should not be entered in the project's public bug
tracker unless the necessary configuration is in place to limit access to the
issue to only the reporter and the project's security team.
Security vulnerabilities **should not** be entered in the project's public bug
tracker.
Vulnerability Handling
----------------------
@ -23,20 +22,20 @@ No information should be made public about a vulnerability until it is
formally announced at the end of this process. That means, for example that a
bug tracker entry must NOT be created to track the issue since that will make
the issue public and it should not be discussed on any of the project's public
mailing lists. Also messages associated with any commits should not make
any reference to the security nature of the commit if done prior to the public
mailing lists. Also messages associated with any commits should not make any
reference to the security nature of the commit if done prior to the public
announcement.
- The person discovering the issue, the reporter, reports the vulnerability
privately to `curl-security@haxx.se`. That's an email alias that reaches a
handful of selected and trusted people.
- The person discovering the issue, the reporter, reports the vulnerability on
[https://hackerone.com/curl](https://hackerone.com/curl). Issues filed there
reach a handful of selected and trusted people.
- Messages that do not relate to the reporting or managing of an undisclosed
security vulnerability in curl or libcurl are ignored and no further action
is required.
- A person in the security team sends an e-mail to the original reporter to
acknowledge the report.
- A person in the security team responds to the original report to acknowledge
that a human has seen the report.
- The security team investigates the report and either rejects it or accepts
it.
@ -51,9 +50,9 @@ announcement.
should involve the reporter as much as possible.
- The release of the information should be "as soon as possible" and is most
often synced with an upcoming release that contains the fix. If the
reporter, or anyone else, thinks the next planned release is too far away
then a separate earlier release for security reasons should be considered.
often synchronized with an upcoming release that contains the fix. If the
reporter, or anyone else involved, thinks the next planned release is too
far away, then a separate earlier release should be considered.
- Write a security advisory draft about the problem that explains what the
problem is, its impact, which versions it affects, solutions or workarounds,
@ -61,12 +60,14 @@ announcement.
Figure out the CWE (Common Weakness Enumeration) number for the flaw.
- Request a CVE number from
[HackerOne](https://docs.hackerone.com/programs/cve-requests.html)
- Consider informing
[distros@openwall](https://oss-security.openwall.org/wiki/mailing-lists/distros)
when also informing and preparing them for the upcoming public security
vulnerability announcement - attach the advisory draft for information. Note
that 'distros' won't accept an embargo longer than 14 days and they do not
care for Windows-specific flaws. For windows-specific flaws, request CVE
directly from MITRE.
to prepare them for the upcoming public security vulnerability
announcement - attach the advisory draft for information. Note that
'distros' won't accept an embargo longer than 14 days and they do not care
for Windows-specific flaws.
- Update the "security advisory" with the CVE number.
@ -93,6 +94,9 @@ announcement.
curl-security (at haxx dot se)
------------------------------
This is a private mailing list for discussions on and about curl security
issues.
Who is on this list? There are a couple of criteria you must meet, and then we
might ask you to join the list or you can ask to join it. It really isn't very
formal. We basically only require that you have a long-term presence in the
@ -121,15 +125,8 @@ Publishing Security Advisories
6. On security advisory release day, push the changes on the curl-www
repository's remote master branch.
Hackerone Internet Bug Bounty
-----------------------------
Bug Bounty
----------
The curl project does not run any bounty program on its own, but there are
outside organizations that do. First report your issue the normal way and
proceed as described in this document.
Then, if the issue is [critical](https://hackerone.com/ibb-data), you are
eligible to apply for a bounty from Hackerone for your find.
Once your reported vulnerability has been publicly disclosed by the curl
project, you can submit a [report to them](https://hackerone.com/ibb-data).
See [BUG-BOUNTY](https://curl.haxx.se/docs/bugbounty.html) for details on the
bug bounty program.

View file

@ -53,9 +53,9 @@
Note that these weak ciphers are identified as flawed. For example, this
includes symmetric ciphers with less than 128 bit keys and RC4.
WinSSL in Windows XP is not able to connect to servers that no longer
Schannel in Windows XP is not able to connect to servers that no longer
support the legacy handshakes and algorithms used by those versions, so we
advice against building curl to use WinSSL on really old Windows versions.
advise against building curl to use Schannel on really old Windows versions.
References:
@ -77,9 +77,9 @@
Some SSL backends may do certificate revocation checks (CRL, OCSP, etc)
depending on the OS or build configuration. The --ssl-no-revoke option was
introduced in 7.44.0 to disable revocation checking but currently is only
supported for WinSSL (the native Windows SSL library), with an exception in
the case of Windows' Untrusted Publishers blacklist which it seems can't be
bypassed. This option may have broader support to accommodate other SSL
supported for Schannel (the native Windows SSL library), with an exception
in the case of Windows' Untrusted Publishers blacklist which it seems can't
be bypassed. This option may have broader support to accommodate other SSL
backends in the future.
References:

File diff suppressed because it is too large

View file

@ -17,11 +17,10 @@
All bugs documented in the KNOWN_BUGS document are subject for fixing!
1. libcurl
1.2 More data sharing
1.1 TFO support on Windows
1.2 Consult %APPDATA% also for .netrc
1.3 struct lifreq
1.4 signal-based resolver timeouts
1.5 get rid of PATH_MAX
1.6 Modified buffer size approach
1.7 Support HTTP/2 for HTTP(S) proxies
1.8 CURLOPT_RESOLVE for any port number
1.9 Cache negative name resolves
@ -34,16 +33,15 @@
1.16 Try to URL encode given URL
1.17 Add support for IRIs
1.18 try next proxy if one doesn't work
1.19 Timeout idle connections from the pool
1.20 SRV and URI DNS records
1.21 Have the URL API offer IDN decoding
1.22 CURLINFO_PAUSE_STATE
1.23 Offer API to flush the connection pool
1.24 TCP Fast Open for windows
1.25 Expose tried IP addresses that failed
1.26 CURL_REFUSE_CLEARTEXT
1.27 hardcode the "localhost" addresses
1.28 FD_CLOEXEC
1.29 Upgrade to websockets
1.30 config file parsing
2. libcurl - multi interface
2.1 More non-blocking
@ -60,7 +58,6 @@
4.1 HOST
4.2 Alter passive/active on failure and retry
4.3 Earlier bad letter detection
4.4 REST for large files
4.5 ASCII support
4.6 GSSAPI via Windows SSPI
4.7 STAT for LIST without data connection
@ -68,12 +65,9 @@
5. HTTP
5.1 Better persistency for HTTP 1.0
5.2 support FF3 sqlite cookie files
5.3 Rearrange request header order
5.4 Allow SAN names in HTTP/2 server push
5.5 auth= in URLs
5.6 Refuse "downgrade" redirects
5.7 QUIC
5.8 Leave secure cookies alone
6. TELNET
6.1 ditch stdin
@ -81,12 +75,10 @@
6.3 feature negotiation debug data
7. SMTP
7.1 Pipelining
7.2 Enhanced capability support
7.3 Add CURLOPT_MAIL_CLIENT option
8. POP3
8.1 Pipelining
8.2 Enhanced capability support
9. IMAP
@ -102,10 +94,8 @@
11.4 Create remote directories
12. New protocols
12.1 RSYNC
13. SSL
13.1 Disable specific versions
13.2 Provide mutex locking API
13.3 Support in-memory certs/ca certs/keys
13.4 Cache/share OpenSSL contexts
@ -113,20 +103,18 @@
13.6 Provide callback for cert verification
13.7 improve configure --with-ssl
13.8 Support DANE
13.9 Configurable loading of OpenSSL configuration file
13.10 Support Authority Information Access certificate extension (AIA)
13.11 Support intermediate & root pinning for PINNEDPUBLICKEY
13.12 Support HSTS
13.13 Support HPKP
13.14 Support the clienthello extension
14. GnuTLS
14.1 SSL engine stuff
14.2 check connection
15. WinSSL/SChannel
15.1 Add support for client certificate authentication
15.3 Add support for the --ciphers option
15.4 Add option to disable client certificate auto-send
16. SASL
16.1 Other authentication mechanisms
@ -135,7 +123,7 @@
17. SSH protocols
17.1 Multiplexing
17.2 SFTP performance
17.2 Handle growing SFTP files
17.3 Support better than MD5 hostkey hash
17.4 Support CURLOPT_PREQUOTE
@ -143,16 +131,12 @@
18.1 sync
18.2 glob posts
18.3 prevent file overwriting
18.4 simultaneous parallel transfers
18.5 UTF-8 filenames in Content-Disposition
18.6 warning when setting an option
18.7 warning if curl version is not in sync with libcurl version
18.8 offer color-coded HTTP header output
18.7 at least N milliseconds between requests
18.9 Choose the name of file in braces for complex URLs
18.10 improve how curl works in a windows console window
18.11 Windows: set attribute 'archive' for completed downloads
18.12 keep running, read instructions from pipe/socket
18.13 support metalink in http headers
18.14 --fail without --location should treat 3xx as a failure
18.15 --retry should resume
18.16 send only part of --data
18.17 consider file name from the redirected URL with -O ?
@ -163,6 +147,7 @@
19. Build
19.1 roffit
19.2 Enable PIE and RELRO by default
19.3 cmake test suite improvements
20. Test suite
20.1 SSL tunnel
@ -191,10 +176,18 @@
1. libcurl
1.2 More data sharing
1.1 TFO support on Windows
curl_share_* functions already exist and work, and they can be extended to
share more. For example, enable sharing of the ares channel.
TCP Fast Open is supported on several platforms but not on Windows. Work on
this was once started but never finished.
See https://github.com/curl/curl/pull/3378
1.2 Consult %APPDATA% also for .netrc
%APPDATA%\.netrc is not considered when running on Windows. Shouldn't it?
See https://github.com/curl/curl/issues/4016
1.3 struct lifreq
@ -202,52 +195,21 @@
SIOCGIFADDR on newer Solaris versions as they claim the latter is obsolete.
To support IPv6 interface addresses for network interfaces properly.
1.4 signal-based resolver timeouts
libcurl built without an asynchronous resolver library uses alarm() to time
out DNS lookups. When a timeout occurs, this causes libcurl to jump from the
signal handler back into the library with a sigsetjmp, which effectively
causes libcurl to continue running within the signal handler. This is
non-portable and could cause problems on some platforms. A discussion on the
problem is available at https://curl.haxx.se/mail/lib-2008-09/0197.html
Also, alarm() provides timeout resolution only to the nearest second. alarm
ought to be replaced by setitimer on systems that support it.
1.5 get rid of PATH_MAX
Having code use and rely on PATH_MAX is not nice:
https://insanecoding.blogspot.com/2007/11/pathmax-simply-isnt.html
Currently the SSH based code uses it a bit, but to remove PATH_MAX from there
we need libssh2 to properly tell us when we pass in a too small buffer and
its current API (as of libssh2 1.2.7) doesn't.
1.6 Modified buffer size approach
Current libcurl allocates a fixed 16K size buffer for download and an
additional 16K for upload. They are always unconditionally part of the easy
handle. If CRLF translations are requested, an additional 32K "scratch
buffer" is allocated. A total of 64K transfer buffers in the worst case.
First, while the handles are not actually in use these buffers could be freed
so that lingering handles just kept in queues or whatever waste less memory.
Secondly, SFTP is a protocol that needs to handle many ~30K blocks at once
since each needs to be individually acked and therefore libssh2 must be
allowed to send (or receive) many separate ones in parallel to achieve high
transfer speeds. A current libcurl build with a 16K buffer makes that
impossible, but one with a 512K buffer will reach MUCH faster transfers. But
allocating 512K unconditionally for all buffers just in case they would like
to do fast SFTP transfers at some point is not a good solution either.
Dynamically allocate buffer size depending on protocol in use in combination
with freeing it after each individual transfer? Other suggestions?
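Only as a hedged aside on what is already tunable: CURLOPT_BUFFERSIZE (and, for
uploads, CURLOPT_UPLOAD_BUFFERSIZE) lets an application ask for a larger
per-handle buffer today, which helps the SFTP case; the allocate-on-demand and
free-when-idle ideas above go beyond that. A minimal sketch:

  #include <curl/curl.h>

  static CURL *setup_sftp_handle(void)
  {
    CURL *curl = curl_easy_init();
    /* request a larger receive buffer; libcurl clamps the value to the
       range it supports */
    curl_easy_setopt(curl, CURLOPT_BUFFERSIZE, 512 * 1024L);
    curl_easy_setopt(curl, CURLOPT_URL, "sftp://example.com/file.bin");
    return curl;
  }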
Currently the libssh2 SSH based code uses it, but to remove PATH_MAX from
there we need libssh2 to properly tell us when we pass in a too small buffer
and its current API (as of libssh2 1.2.7) doesn't.
1.7 Support HTTP/2 for HTTP(S) proxies
Support for doing HTTP/2 to HTTP and HTTPS proxies is still missing.
See https://github.com/curl/curl/issues/3570
1.8 CURLOPT_RESOLVE for any port number
This option allows applications to set a replacement IP address for a given
@ -359,27 +321,11 @@
https://github.com/curl/curl/issues/896
1.19 Timeout idle connections from the pool
libcurl currently keeps connections in its connection pool for an indefinite
period of time, until it either gets reused, gets noticed that it has been
closed by the server or gets pruned to make room for a new connection.
To reduce overhead (especially for when we add monitoring of the connections
in the pool), we should introduce a timeout so that connections that have
been idle for N seconds get closed.
1.20 SRV and URI DNS records
Offer support for resolving SRV and URI DNS records for libcurl to know which
server to connect to for various protocols (including HTTP!).
1.21 Have the URL API offer IDN decoding
Similar to how URL decoding/encoding is done, we could have URL functions to
convert IDN host names to punycode (probably not the reverse).
https://github.com/curl/curl/issues/3232
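For context, a minimal sketch of the URL API this would extend; the punycode
conversion flag itself is the hypothetical part and does not exist in this
release:

  #include <curl/curl.h>

  int main(void)
  {
    CURLU *u = curl_url();
    char *host = NULL;
    if(curl_url_set(u, CURLUPART_URL, "https://räksmörgås.se/", 0) == CURLUE_OK
       && curl_url_get(u, CURLUPART_HOST, &host, 0) == CURLUE_OK) {
      /* today the host comes back as stored; the proposal is an extra flag
         that would hand back the punycode (xn--...) form instead */
      curl_free(host);
    }
    curl_url_cleanup(u);
    return 0;
  }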
1.22 CURLINFO_PAUSE_STATE
Return information about the transfer's current pause state, in both
@ -404,21 +350,6 @@
https://github.com/curl/curl/issues/2126
1.26 CURL_REFUSE_CLEARTEXT
An environment variable that when set will make libcurl refuse to use any
cleartext network protocol. That's all non-encrypted ones (FTP, HTTP, Gopher,
etc). By adding the check to libcurl and not just curl, this environment
variable can then help users to block all libcurl-using programs from
accessing the network using unsafe protocols.
The variable could be given some sort of syntax or different levels and be
used to also allow, for example, users to refuse to let libcurl do transfers
with HTTPS certificate checks disabled.
It could also automatically refuse usernames in URLs when set
(see CURLOPT_DISALLOW_USERNAME_IN_URL)
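Until something like that exists, an application can get a rough per-handle
equivalent with CURLOPT_PROTOCOLS; a hedged sketch (the environment variable
itself remains the hypothetical part):

  #include <curl/curl.h>

  /* restrict one easy handle to encrypted protocols only */
  static void refuse_cleartext(CURL *curl)
  {
    curl_easy_setopt(curl, CURLOPT_PROTOCOLS,
                     (long)(CURLPROTO_HTTPS | CURLPROTO_FTPS | CURLPROTO_SFTP |
                            CURLPROTO_SCP | CURLPROTO_SMTPS | CURLPROTO_IMAPS |
                            CURLPROTO_POP3S));
    curl_easy_setopt(curl, CURLOPT_REDIR_PROTOCOLS,
                     (long)(CURLPROTO_HTTPS | CURLPROTO_FTPS));
  }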
1.27 hardcode the "localhost" addresses
There's this new spec getting adopted that says "localhost" should always and
@ -439,6 +370,22 @@
https://github.com/curl/curl/issues/2252
1.29 Upgrade to websockets
libcurl could offer a smoother path to get to a websocket connection.
See https://github.com/curl/curl/issues/3523
Michael Kaufmann's suggestion here:
https://curl.haxx.se/video/curlup-2017/2017-03-19_05_Michael_Kaufmann_Websocket_support_for_curl.mp4
1.30 config file parsing
Consider providing an API, possibly in a separate companion library, for
parsing a config file like curl's -K/--config option to allow applications to
get the same ability to read curl options from files.
See https://github.com/curl/curl/issues/3698
2. libcurl - multi interface
2.1 More non-blocking
@ -520,12 +467,6 @@
Make the detection of (bad) %0d and %0a codes in FTP URL parts earlier in the
process to avoid doing a resolve and connect in vain.
4.4 REST for large files
REST fix for servers not behaving well on >2GB requests. This should fail if
the server doesn't set the pointer to the requested index. The tricky
(impossible?) part is to figure out if the server did the right thing or not.
4.5 ASCII support
FTP ASCII transfers do not follow RFC959. They don't convert the data
@ -558,12 +499,6 @@
"Better" support for persistent connections over HTTP 1.0
https://curl.haxx.se/bug/feature.cgi?id=1089001
5.2 support FF3 sqlite cookie files
Firefox 3 is changing from its former format to a sqlite database instead.
We should consider how (lib)curl can/should support this.
https://curl.haxx.se/bug/feature.cgi?id=1871388
5.3 Rearrange request header order
Server implementors often make an effort to detect browser and to reject
@ -576,6 +511,15 @@
headers use a default value so only headers that need to be moved have to be
specified.
5.4 Allow SAN names in HTTP/2 server push
curl only allows HTTP/2 push promise if the provided :authority header value
exactly matches the host name given in the URL. It could be extended to allow
any name that would match the Subject Alternative Names in the server's TLS
certificate.
See https://github.com/curl/curl/pull/3581
5.5 auth= in URLs
Add the ability to specify the preferred authentication mechanism to use by
@ -583,43 +527,19 @@
For example:
http://test:pass;auth=NTLM@example.com would be equivalent to specifying --user
test:pass;auth=NTLM or --user test:pass --ntlm from the command line.
http://test:pass;auth=NTLM@example.com would be equivalent to specifying
--user test:pass;auth=NTLM or --user test:pass --ntlm from the command line.
Additionally this should be implemented for proxy base URLs as well.
5.6 Refuse "downgrade" redirects
See https://github.com/curl/curl/issues/226
Consider a way to tell curl to refuse to "downgrade" the protocol with a
redirect, and/or possibly a bit that refuses to let a redirect change the
protocol at all.
5.7 QUIC
The standardization process of QUIC has been taken to the IETF and can be
followed on the [IETF QUIC Mailing
list](https://www.ietf.org/mailman/listinfo/quic). I'd like us to get on the
bandwagon. Ideally, this would be done with a separate library/project to
handle the binary/framing layer in a similar fashion to how HTTP/2 is
implemented. This, to allow other projects to benefit from the work and to
thus broaden the interest and chance of others to participate.
5.8 Leave secure cookies alone
Non-secure origins (HTTP sites) should not be allowed to set or modify
cookies with the 'secure' property:
https://tools.ietf.org/html/draft-ietf-httpbis-cookie-alone-01
6. TELNET
6.1 ditch stdin
Reading input (to send to the remote server) on stdin is a crappy solution for
library purposes. We need to invent a good way for the application to be able
to provide the data to send.
Reading input (to send to the remote server) on stdin is a crappy solution
for library purposes. We need to invent a good way for the application to be
able to provide the data to send.
6.2 ditch telnet-specific select
@ -629,15 +549,11 @@ to provide the data to send.
6.3 feature negotiation debug data
Add telnet feature negotiation data to the debug callback as header data.
7. SMTP
7.1 Pipelining
Add support for pipelining emails.
7.2 Enhanced capability support
Add the ability, for an application that uses libcurl, to obtain the list of
@ -656,10 +572,6 @@ to provide the data to send.
8. POP3
8.1 Pipelining
Add support for pipelining commands.
8.2 Enhanced capability support
Add the ability, for an application that uses libcurl, to obtain the list of
@ -704,18 +616,8 @@ that doesn't exist on the server, just like --ftp-create-dirs.
12. New protocols
12.1 RSYNC
There's no RFC for the protocol or a URI/URL format. An implementation
should most probably use an existing rsync library, such as librsync.
13. SSL
13.1 Disable specific versions
Provide an option that allows for disabling specific SSL versions, such as
SSLv2 https://curl.haxx.se/bug/feature.cgi?id=1767276
13.2 Provide mutex locking API
Provide a libcurl API for setting mutex callbacks in the underlying SSL
@ -780,17 +682,6 @@ that doesn't exist on the server, just like --ftp-create-dirs.
Björn Stenberg wrote a separate initial take on DANE that was never
completed.
13.9 Configurable loading of OpenSSL configuration file
libcurl calls the OpenSSL function CONF_modules_load_file() in openssl.c,
Curl_ossl_init(). "We regard any changes in the OpenSSL configuration as a
security risk or at least as unnecessary."
Please add a configuration switch or something similar to disable the
CONF_modules_load_file() call.
See https://github.com/curl/curl/issues/2724
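For orientation, roughly the kind of call and guard being discussed; the flags
match common usage but the name of the opt-out define is an illustrative
assumption, not the shipped implementation:

  #include <openssl/conf.h>

  void ossl_init_sketch(void)
  {
  #ifndef HYPOTHETICAL_DISABLE_OPENSSL_AUTO_CONFIG  /* proposed opt-out */
    /* load the default OpenSSL configuration, ignoring a missing file */
    CONF_modules_load_file(NULL, NULL,
                           CONF_MFLAGS_DEFAULT_SECTION |
                           CONF_MFLAGS_IGNORE_MISSING_FILE);
  #endif
  }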
13.10 Support Authority Information Access certificate extension (AIA)
AIA can provide various things like CRLs but more importantly information
@ -823,21 +714,6 @@ that doesn't exist on the server, just like --ftp-create-dirs.
Doc: https://developer.mozilla.org/en-US/docs/Web/Security/HTTP_strict_transport_security
RFC 6797: https://tools.ietf.org/html/rfc6797
13.13 Support HPKP
"HTTP Public Key Pinning" is TOFU (trust on first use), time-based
features indicated by a HTTP header send by the webserver. It's purpose is
to prevent Man-in-the-middle attacks by trusted CAs by allowing webadmins
to specify which CAs/certificates/public keys to trust when connection to
their websites.
It can be build based on PINNEDPUBLICKEY.
Wikipedia: https://en.wikipedia.org/wiki/HTTP_Public_Key_Pinning
OWASP: https://www.owasp.org/index.php/Certificate_and_Public_Key_Pinning
Doc: https://developer.mozilla.org/de/docs/Web/Security/Public_Key_Pinning
RFC: https://tools.ietf.org/html/draft-ietf-websec-key-pinning-21
13.14 Support the clienthello extension
Certain stupid networks and middle boxes have a problem with SSL handshake
@ -850,10 +726,6 @@ that doesn't exist on the server, just like --ftp-create-dirs.
14. GnuTLS
14.1 SSL engine stuff
Is this even possible?
14.2 check connection
Add a way to check if the connection seems to be alive, to correspond to the
@ -883,6 +755,19 @@ that doesn't exist on the server, just like --ftp-create-dirs.
- Specifying Schannel Ciphers and Cipher Strengths
https://msdn.microsoft.com/en-us/library/windows/desktop/aa380161.aspx
15.4 Add option to disable client certificate auto-send
Microsoft says "By default, Schannel will, with no notification to the client,
attempt to locate a client certificate and send it to the server." That could
be considered a privacy violation and unexpected.
Some Windows users have come to expect that default behavior, and changing
the default to make it consistent with other SSL backends would be a breaking
change. An option should be added that can be used to disable the default
Schannel auto-send behavior.
https://github.com/curl/curl/issues/2262
16. SASL
16.1 Other authentication mechanisms
@ -915,10 +800,15 @@ that doesn't exist on the server, just like --ftp-create-dirs.
To fix this, libcurl would have to detect an existing connection and "attach"
the new transfer to the existing one.
17.2 SFTP performance
17.2 Handle growing SFTP files
libcurl's SFTP transfer performance is sub par and can be improved, mostly by
the approach mentioned in "1.6 Modified buffer size approach".
The SFTP code in libcurl checks the file size *before* a transfer starts and
then proceeds to transfer exactly that amount of data. If the remote file
grows while the transfer is in progress, libcurl won't notice and will not
adapt. The OpenSSH SFTP command line tool does and libcurl could also just
attempt to download more to see if there is more to get...
https://github.com/curl/curl/issues/4344
17.3 Support better than MD5 hostkey hash
@ -958,16 +848,6 @@ that doesn't exist on the server, just like --ftp-create-dirs.
existing). So that index.html becomes first index.html.1 and then
index.html.2 etc.
18.4 simultaneous parallel transfers
The client could be told to use maximum N simultaneous parallel transfers and
then just make sure that happens. It should of course not make more than one
connection to the same remote host. This would require the client to use the
multi interface. https://curl.haxx.se/bug/feature.cgi?id=1558595
Using the multi interface would also allow properly using parallel transfers
with HTTP/2 and supporting HTTP/2 server push from the command line.
18.5 UTF-8 filenames in Content-Disposition
RFC 6266 documents how UTF-8 names can be passed to a client in the
@ -975,24 +855,16 @@ that doesn't exist on the server, just like --ftp-create-dirs.
https://github.com/curl/curl/issues/1888
18.6 warning when setting an option
18.7 at least N milliseconds between requests
Display a warning when libcurl returns an error when setting an option.
This can be useful to tell when support for a particular feature hasn't been
compiled into the library.
Allow curl command lines to issue a lot of requests against services that
limit users to no more than N requests/second or similar. Could be
implemented with an option asking that at least a certain time has elapsed
since the previous request before the next one is performed. Example:
18.7 warning if curl version is not in sync with libcurl version
$ curl "https://example.com/api?input=[1-1000]" -d yadayada --after 500
This is usually a sign of a funny, weird or unexpected install situation
that isn't always quickly or easily detected by users. curl and libcurl are
always released in sync and should use the same version numbers except in
very special situations.
18.8 offer color-coded HTTP header output
By offering different color output on the header name and the header
contents, they could be made more readable and thus help users working on
HTTP services.
See https://github.com/curl/curl/issues/3920
18.9 Choose the name of file in braces for complex URLs
@ -1012,6 +884,17 @@ that doesn't exist on the server, just like --ftp-create-dirs.
window, the transfer is interrupted and can get disconnected. This can
probably be improved. See https://github.com/curl/curl/issues/322
18.11 Windows: set attribute 'archive' for completed downloads
The archive bit (FILE_ATTRIBUTE_ARCHIVE, 0x20) separates files that shall be
backed up from those that are either not ready or have not changed.
Downloads in progress are neither ready to be backed up, nor should they be
opened by a different process. Only after a download has completed is it
sensible to include it in a snapshot or backup of the system.
See https://github.com/curl/curl/issues/3354
18.12 keep running, read instructions from pipe/socket
Provide an option that makes curl not exit after the last URL (or even work
@ -1020,30 +903,6 @@ that doesn't exist on the server, just like --ftp-create-dirs.
invoke can talk to the still running instance and ask for transfers to get
done, and thus maintain its connection pool, DNS cache and more.
18.13 support metalink in http headers
Curl has support for downloading a metalink xml file, processing it, and then
downloading the target of the metalink. This is done via the --metalink option.
It would be nice if metalink also supported downloading via metalink
information that is stored in HTTP headers (RFC 6249). Theoretically this could
also be supported with the --metalink option.
See https://tools.ietf.org/html/rfc6249
See also https://lists.gnu.org/archive/html/bug-wget/2015-06/msg00034.html for
an implementation of this in wget.
18.14 --fail without --location should treat 3xx as a failure
To allow a command line like this to detect a redirect and consider it a
failure:
curl -v --fail -O https://example.com/curl-7.48.0.tar.gz
... --fail must treat 3xx responses as failures too. The least problematic
way to implement this is probably to add that new logic in the command line
tool only and not in the underlying CURLOPT_FAILONERROR logic.
18.15 --retry should resume
When --retry is used and curl actually retries transfer, it should use the
@ -1126,6 +985,13 @@ that doesn't exist on the server, just like --ftp-create-dirs.
to no impact, neither on the performance nor on the general functionality of
curl.
19.3 cmake test suite improvements
The cmake build doesn't support 'make show', so it doesn't know which tests
are in the makefile or not (causing AppVeyor builds to emit many false
warnings about it), nor does it support running the test suite when building
out-of-tree.
See https://github.com/curl/curl/issues/3109
20. Test suite
@ -1152,17 +1018,17 @@ that doesn't exist on the server, just like --ftp-create-dirs.
20.5 Add support for concurrent connections
Tests 836, 882 and 938 were designed to verify that separate connections aren't
used when using different login credentials in protocols that shouldn't re-use
a connection under such circumstances.
Tests 836, 882 and 938 were designed to verify that separate connections
aren't used when using different login credentials in protocols that
shouldn't re-use a connection under such circumstances.
Unfortunately, ftpserver.pl doesn't appear to support multiple concurrent
connections. The read while() loop seems to loop until it receives a disconnect
from the client, where it then enters the waiting for connections loop. When
the client opens a second connection to the server, the first connection hasn't
been dropped (unless it has been forced - which we shouldn't do in these tests)
and thus the wait for connections loop is never entered to receive the second
connection.
connections. The read while() loop seems to loop until it receives a
disconnect from the client, where it then enters the waiting for connections
loop. When the client opens a second connection to the server, the first
connection hasn't been dropped (unless it has been forced - which we
shouldn't do in these tests) and thus the wait for connections loop is never
entered to receive the second connection.
20.6 Use the RFC6265 test suite

View file

@ -113,8 +113,7 @@ build_triplet = @build@
host_triplet = @host@
subdir = docs/cmdline-opts
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
am__aclocal_m4_deps = $(top_srcdir)/m4/ax_code_coverage.m4 \
$(top_srcdir)/m4/ax_compile_check_sizeof.m4 \
am__aclocal_m4_deps = $(top_srcdir)/m4/ax_compile_check_sizeof.m4 \
$(top_srcdir)/m4/curl-compilers.m4 \
$(top_srcdir)/m4/curl-confopts.m4 \
$(top_srcdir)/m4/curl-functions.m4 \
@ -178,12 +177,6 @@ CC = @CC@
CCDEPMODE = @CCDEPMODE@
CFLAGS = @CFLAGS@
CFLAG_CURL_SYMBOL_HIDING = @CFLAG_CURL_SYMBOL_HIDING@
CODE_COVERAGE_CFLAGS = @CODE_COVERAGE_CFLAGS@
CODE_COVERAGE_CPPFLAGS = @CODE_COVERAGE_CPPFLAGS@
CODE_COVERAGE_CXXFLAGS = @CODE_COVERAGE_CXXFLAGS@
CODE_COVERAGE_ENABLED = @CODE_COVERAGE_ENABLED@
CODE_COVERAGE_LDFLAGS = @CODE_COVERAGE_LDFLAGS@
CODE_COVERAGE_LIBS = @CODE_COVERAGE_LIBS@
CONFIGURE_OPTIONS = @CONFIGURE_OPTIONS@
CPP = @CPP@
CPPFLAGS = @CPPFLAGS@
@ -225,14 +218,15 @@ ENABLE_SHARED = @ENABLE_SHARED@
ENABLE_STATIC = @ENABLE_STATIC@
EXEEXT = @EXEEXT@
FGREP = @FGREP@
FISH_FUNCTIONS_DIR = @FISH_FUNCTIONS_DIR@
GCOV = @GCOV@
GENHTML = @GENHTML@
GREP = @GREP@
HAVE_BROTLI = @HAVE_BROTLI@
HAVE_GNUTLS_SRP = @HAVE_GNUTLS_SRP@
HAVE_LDAP_SSL = @HAVE_LDAP_SSL@
HAVE_LIBZ = @HAVE_LIBZ@
HAVE_OPENSSL_SRP = @HAVE_OPENSSL_SRP@
HAVE_PROTO_BSDSOCKET_H = @HAVE_PROTO_BSDSOCKET_H@
IDN_ENABLED = @IDN_ENABLED@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
@ -293,8 +287,6 @@ STRIP = @STRIP@
SUPPORT_FEATURES = @SUPPORT_FEATURES@
SUPPORT_PROTOCOLS = @SUPPORT_PROTOCOLS@
USE_ARES = @USE_ARES@
USE_CYASSL = @USE_CYASSL@
USE_DARWINSSL = @USE_DARWINSSL@
USE_GNUTLS = @USE_GNUTLS@
USE_GNUTLS_NETTLE = @USE_GNUTLS_NETTLE@
USE_LIBRTMP = @USE_LIBRTMP@
@ -303,12 +295,17 @@ USE_LIBSSH2 = @USE_LIBSSH2@
USE_MBEDTLS = @USE_MBEDTLS@
USE_MESALINK = @USE_MESALINK@
USE_NGHTTP2 = @USE_NGHTTP2@
USE_NGHTTP3 = @USE_NGHTTP3@
USE_NGTCP2 = @USE_NGTCP2@
USE_NGTCP2_CRYPTO_OPENSSL = @USE_NGTCP2_CRYPTO_OPENSSL@
USE_NSS = @USE_NSS@
USE_OPENLDAP = @USE_OPENLDAP@
USE_POLARSSL = @USE_POLARSSL@
USE_QUICHE = @USE_QUICHE@
USE_SCHANNEL = @USE_SCHANNEL@
USE_SECTRANSP = @USE_SECTRANSP@
USE_UNIX_SOCKETS = @USE_UNIX_SOCKETS@
USE_WINDOWS_SSPI = @USE_WINDOWS_SSPI@
USE_WOLFSSL = @USE_WOLFSSL@
VERSION = @VERSION@
VERSIONNUM = @VERSIONNUM@
ZLIB_LIBS = @ZLIB_LIBS@
@ -370,55 +367,214 @@ top_builddir = @top_builddir@
top_srcdir = @top_srcdir@
AUTOMAKE_OPTIONS = foreign no-dependencies
MANPAGE = $(top_builddir)/docs/curl.1
DPAGES = abstract-unix-socket.d anyauth.d append.d basic.d cacert.d capath.d cert.d \
cert-status.d cert-type.d ciphers.d compressed.d compressed-ssh.d \
config.d doh-url.d \
connect-timeout.d connect-to.d continue-at.d cookie.d cookie-jar.d \
create-dirs.d crlf.d crlfile.d data-ascii.d data-binary.d data.d \
data-raw.d data-urlencode.d delegation.d digest.d disable.d \
disable-eprt.d disable-epsv.d dns-interface.d dns-ipv4-addr.d \
dns-ipv6-addr.d dns-servers.d dump-header.d egd-file.d engine.d \
expect100-timeout.d fail.d fail-early.d false-start.d \
form.d form-string.d ftp-account.d ftp-alternative-to-user.d \
ftp-create-dirs.d ftp-method.d ftp-pasv.d ftp-port.d ftp-pret.d \
ftp-skip-pasv-ip.d ftp-ssl-ccc.d ftp-ssl-ccc-mode.d ftp-ssl-control.d \
get.d globoff.d \
happy-eyeballs-timeout-ms.d \
head.d header.d help.d hostpubmd5.d http1.0.d \
http1.1.d http2.d http2-prior-knowledge.d ignore-content-length.d \
include.d insecure.d interface.d ipv4.d ipv6.d junk-session-cookies.d \
keepalive-time.d key.d key-type.d krb.d libcurl.d limit-rate.d \
list-only.d local-port.d location.d location-trusted.d \
login-options.d mail-auth.d mail-from.d mail-rcpt.d manual.d \
max-filesize.d max-redirs.d max-time.d metalink.d negotiate.d netrc.d \
netrc-file.d netrc-optional.d next.d no-alpn.d no-buffer.d \
no-keepalive.d no-npn.d noproxy.d no-sessionid.d ntlm.d ntlm-wb.d \
oauth2-bearer.d output.d pass.d path-as-is.d pinnedpubkey.d post301.d \
post302.d post303.d preproxy.d progress-bar.d proto.d proto-default.d \
proto-redir.d proxy1.0.d proxy-anyauth.d proxy-basic.d proxy-cacert.d \
proxy-capath.d proxy-cert.d proxy-cert-type.d proxy-ciphers.d \
proxy-crlfile.d proxy.d proxy-digest.d proxy-header.d \
proxy-insecure.d proxy-key.d proxy-key-type.d proxy-negotiate.d \
proxy-ntlm.d proxy-pass.d proxy-service-name.d \
proxy-ssl-allow-beast.d proxy-tlsauthtype.d proxy-tlspassword.d \
proxy-tlsuser.d proxy-tlsv1.d proxytunnel.d proxy-user.d pubkey.d \
quote.d random-file.d range.d raw.d referer.d remote-header-name.d \
remote-name-all.d remote-name.d remote-time.d request.d resolve.d \
retry-connrefused.d retry.d retry-delay.d retry-max-time.d sasl-ir.d \
service-name.d show-error.d silent.d socks4a.d socks4.d socks5.d \
socks5-basic.d socks5-gssapi.d proxy-pinnedpubkey.d \
socks5-gssapi-nec.d socks5-gssapi-service.d socks5-hostname.d \
speed-limit.d speed-time.d ssl-allow-beast.d ssl.d ssl-no-revoke.d \
ssl-reqd.d sslv2.d sslv3.d stderr.d suppress-connect-headers.d \
tcp-fastopen.d tcp-nodelay.d \
telnet-option.d tftp-blksize.d tftp-no-options.d time-cond.d \
tls-max.d \
tlsauthtype.d tlspassword.d tlsuser.d tlsv1.0.d tlsv1.1.d tlsv1.2.d \
tlsv1.3.d tlsv1.d trace-ascii.d trace.d trace-time.d tr-encoding.d \
unix-socket.d upload-file.d url.d use-ascii.d user-agent.d user.d \
verbose.d version.d write-out.d xattr.d request-target.d \
styled-output.d tls13-ciphers.d proxy-tls13-ciphers.d \
disallow-username-in-url.d haproxy-protocol.d
DPAGES = \
abstract-unix-socket.d \
alt-svc.d \
anyauth.d \
append.d basic.d \
cacert.d capath.d \
cert-status.d \
cert-type.d \
cert.d \
ciphers.d \
compressed-ssh.d \
compressed.d \
config.d \
connect-timeout.d \
connect-to.d \
continue-at.d \
cookie-jar.d \
cookie.d \
create-dirs.d \
crlf.d crlfile.d \
data-ascii.d \
data-binary.d \
data-urlencode.d \
data.d data-raw.d \
delegation.d \
digest.d \
disable-eprt.d \
disable-epsv.d \
disable.d \
disallow-username-in-url.d \
dns-interface.d \
dns-ipv4-addr.d \
dns-ipv6-addr.d \
dns-servers.d \
doh-url.d \
dump-header.d \
egd-file.d \
engine.d \
expect100-timeout.d \
fail-early.d \
fail.d \
false-start.d \
form-string.d \
form.d \
ftp-account.d \
ftp-alternative-to-user.d \
ftp-create-dirs.d \
ftp-method.d \
ftp-pasv.d \
ftp-port.d \
ftp-pret.d \
ftp-skip-pasv-ip.d \
ftp-ssl-ccc-mode.d \
ftp-ssl-ccc.d \
ftp-ssl-control.d \
get.d globoff.d \
happy-eyeballs-timeout-ms.d \
haproxy-protocol.d \
head.d header.d \
help.d \
hostpubmd5.d \
http0.9.d \
http1.0.d \
http1.1.d http2.d \
http2-prior-knowledge.d \
http3.d \
ignore-content-length.d \
include.d \
insecure.d \
interface.d \
ipv4.d ipv6.d \
junk-session-cookies.d \
keepalive-time.d \
key.d key-type.d \
krb.d libcurl.d \
limit-rate.d \
list-only.d \
local-port.d \
location-trusted.d \
location.d \
login-options.d \
mail-auth.d \
mail-from.d \
mail-rcpt.d \
manual.d \
max-filesize.d \
max-redirs.d \
max-time.d \
metalink.d \
negotiate.d \
netrc-file.d \
netrc-optional.d \
netrc.d \
next.d no-alpn.d \
no-buffer.d \
no-keepalive.d \
no-npn.d \
no-progress-meter.d \
no-sessionid.d \
noproxy.d \
ntlm.d ntlm-wb.d \
oauth2-bearer.d \
output.d \
parallel.d \
pass.d \
parallel-max.d \
path-as-is.d \
pinnedpubkey.d \
post301.d \
post302.d \
post303.d \
preproxy.d \
progress-bar.d \
proto-default.d \
proto-redir.d \
proto.d \
proxy-anyauth.d \
proxy-basic.d \
proxy-cacert.d \
proxy-capath.d \
proxy-cert-type.d \
proxy-cert.d \
proxy-ciphers.d \
proxy-crlfile.d \
proxy-digest.d \
proxy-header.d \
proxy-insecure.d \
proxy-key-type.d \
proxy-key.d \
proxy-negotiate.d \
proxy-ntlm.d \
proxy-pass.d \
proxy-pinnedpubkey.d \
proxy-service-name.d \
proxy-ssl-allow-beast.d \
proxy-tls13-ciphers.d \
proxy-tlsauthtype.d \
proxy-tlspassword.d \
proxy-tlsuser.d \
proxy-tlsv1.d \
proxy-user.d \
proxy.d \
proxy1.0.d \
proxytunnel.d \
pubkey.d quote.d \
random-file.d \
range.d raw.d \
referer.d \
remote-header-name.d \
remote-name-all.d \
remote-name.d \
remote-time.d \
request-target.d \
request.d \
resolve.d \
retry-connrefused.d \
retry-delay.d \
retry-max-time.d \
retry.d \
sasl-authzid.d \
sasl-ir.d \
service-name.d \
show-error.d \
silent.d \
socks4.d socks5.d \
socks4a.d \
socks5-basic.d \
socks5-gssapi-nec.d \
socks5-gssapi-service.d \
socks5-gssapi.d \
socks5-hostname.d \
speed-limit.d \
speed-time.d \
ssl-allow-beast.d \
ssl-no-revoke.d \
ssl-reqd.d \
ssl.d \
sslv2.d sslv3.d \
stderr.d \
styled-output.d \
suppress-connect-headers.d \
tcp-fastopen.d \
tcp-nodelay.d \
telnet-option.d \
tftp-blksize.d \
tftp-no-options.d \
time-cond.d \
tls-max.d \
tls13-ciphers.d \
tlsauthtype.d \
tlspassword.d \
tlsuser.d \
tlsv1.0.d \
tlsv1.1.d \
tlsv1.2.d \
tlsv1.3.d tlsv1.d \
tr-encoding.d \
trace-ascii.d \
trace-time.d \
trace.d \
unix-socket.d \
upload-file.d \
url.d use-ascii.d \
user-agent.d \
user.d verbose.d \
version.d \
write-out.d \
xattr.d
OTHERPAGES = page-footer page-header
EXTRA_DIST = $(DPAGES) MANPAGE.md gen.pl $(OTHERPAGES) CMakeLists.txt

View file

@ -1,53 +1,212 @@
# Shared between Makefile.am and CMakeLists.txt
DPAGES = abstract-unix-socket.d anyauth.d append.d basic.d cacert.d capath.d cert.d \
cert-status.d cert-type.d ciphers.d compressed.d compressed-ssh.d \
config.d doh-url.d \
connect-timeout.d connect-to.d continue-at.d cookie.d cookie-jar.d \
create-dirs.d crlf.d crlfile.d data-ascii.d data-binary.d data.d \
data-raw.d data-urlencode.d delegation.d digest.d disable.d \
disable-eprt.d disable-epsv.d dns-interface.d dns-ipv4-addr.d \
dns-ipv6-addr.d dns-servers.d dump-header.d egd-file.d engine.d \
expect100-timeout.d fail.d fail-early.d false-start.d \
form.d form-string.d ftp-account.d ftp-alternative-to-user.d \
ftp-create-dirs.d ftp-method.d ftp-pasv.d ftp-port.d ftp-pret.d \
ftp-skip-pasv-ip.d ftp-ssl-ccc.d ftp-ssl-ccc-mode.d ftp-ssl-control.d \
get.d globoff.d \
happy-eyeballs-timeout-ms.d \
head.d header.d help.d hostpubmd5.d http1.0.d \
http1.1.d http2.d http2-prior-knowledge.d ignore-content-length.d \
include.d insecure.d interface.d ipv4.d ipv6.d junk-session-cookies.d \
keepalive-time.d key.d key-type.d krb.d libcurl.d limit-rate.d \
list-only.d local-port.d location.d location-trusted.d \
login-options.d mail-auth.d mail-from.d mail-rcpt.d manual.d \
max-filesize.d max-redirs.d max-time.d metalink.d negotiate.d netrc.d \
netrc-file.d netrc-optional.d next.d no-alpn.d no-buffer.d \
no-keepalive.d no-npn.d noproxy.d no-sessionid.d ntlm.d ntlm-wb.d \
oauth2-bearer.d output.d pass.d path-as-is.d pinnedpubkey.d post301.d \
post302.d post303.d preproxy.d progress-bar.d proto.d proto-default.d \
proto-redir.d proxy1.0.d proxy-anyauth.d proxy-basic.d proxy-cacert.d \
proxy-capath.d proxy-cert.d proxy-cert-type.d proxy-ciphers.d \
proxy-crlfile.d proxy.d proxy-digest.d proxy-header.d \
proxy-insecure.d proxy-key.d proxy-key-type.d proxy-negotiate.d \
proxy-ntlm.d proxy-pass.d proxy-service-name.d \
proxy-ssl-allow-beast.d proxy-tlsauthtype.d proxy-tlspassword.d \
proxy-tlsuser.d proxy-tlsv1.d proxytunnel.d proxy-user.d pubkey.d \
quote.d random-file.d range.d raw.d referer.d remote-header-name.d \
remote-name-all.d remote-name.d remote-time.d request.d resolve.d \
retry-connrefused.d retry.d retry-delay.d retry-max-time.d sasl-ir.d \
service-name.d show-error.d silent.d socks4a.d socks4.d socks5.d \
socks5-basic.d socks5-gssapi.d proxy-pinnedpubkey.d \
socks5-gssapi-nec.d socks5-gssapi-service.d socks5-hostname.d \
speed-limit.d speed-time.d ssl-allow-beast.d ssl.d ssl-no-revoke.d \
ssl-reqd.d sslv2.d sslv3.d stderr.d suppress-connect-headers.d \
tcp-fastopen.d tcp-nodelay.d \
telnet-option.d tftp-blksize.d tftp-no-options.d time-cond.d \
tls-max.d \
tlsauthtype.d tlspassword.d tlsuser.d tlsv1.0.d tlsv1.1.d tlsv1.2.d \
tlsv1.3.d tlsv1.d trace-ascii.d trace.d trace-time.d tr-encoding.d \
unix-socket.d upload-file.d url.d use-ascii.d user-agent.d user.d \
verbose.d version.d write-out.d xattr.d request-target.d \
styled-output.d tls13-ciphers.d proxy-tls13-ciphers.d \
disallow-username-in-url.d haproxy-protocol.d
DPAGES = \
abstract-unix-socket.d \
alt-svc.d \
anyauth.d \
append.d basic.d \
cacert.d capath.d \
cert-status.d \
cert-type.d \
cert.d \
ciphers.d \
compressed-ssh.d \
compressed.d \
config.d \
connect-timeout.d \
connect-to.d \
continue-at.d \
cookie-jar.d \
cookie.d \
create-dirs.d \
crlf.d crlfile.d \
data-ascii.d \
data-binary.d \
data-urlencode.d \
data.d data-raw.d \
delegation.d \
digest.d \
disable-eprt.d \
disable-epsv.d \
disable.d \
disallow-username-in-url.d \
dns-interface.d \
dns-ipv4-addr.d \
dns-ipv6-addr.d \
dns-servers.d \
doh-url.d \
dump-header.d \
egd-file.d \
engine.d \
expect100-timeout.d \
fail-early.d \
fail.d \
false-start.d \
form-string.d \
form.d \
ftp-account.d \
ftp-alternative-to-user.d \
ftp-create-dirs.d \
ftp-method.d \
ftp-pasv.d \
ftp-port.d \
ftp-pret.d \
ftp-skip-pasv-ip.d \
ftp-ssl-ccc-mode.d \
ftp-ssl-ccc.d \
ftp-ssl-control.d \
get.d globoff.d \
happy-eyeballs-timeout-ms.d \
haproxy-protocol.d \
head.d header.d \
help.d \
hostpubmd5.d \
http0.9.d \
http1.0.d \
http1.1.d http2.d \
http2-prior-knowledge.d \
http3.d \
ignore-content-length.d \
include.d \
insecure.d \
interface.d \
ipv4.d ipv6.d \
junk-session-cookies.d \
keepalive-time.d \
key.d key-type.d \
krb.d libcurl.d \
limit-rate.d \
list-only.d \
local-port.d \
location-trusted.d \
location.d \
login-options.d \
mail-auth.d \
mail-from.d \
mail-rcpt.d \
manual.d \
max-filesize.d \
max-redirs.d \
max-time.d \
metalink.d \
negotiate.d \
netrc-file.d \
netrc-optional.d \
netrc.d \
next.d no-alpn.d \
no-buffer.d \
no-keepalive.d \
no-npn.d \
no-progress-meter.d \
no-sessionid.d \
noproxy.d \
ntlm.d ntlm-wb.d \
oauth2-bearer.d \
output.d \
parallel.d \
pass.d \
parallel-max.d \
path-as-is.d \
pinnedpubkey.d \
post301.d \
post302.d \
post303.d \
preproxy.d \
progress-bar.d \
proto-default.d \
proto-redir.d \
proto.d \
proxy-anyauth.d \
proxy-basic.d \
proxy-cacert.d \
proxy-capath.d \
proxy-cert-type.d \
proxy-cert.d \
proxy-ciphers.d \
proxy-crlfile.d \
proxy-digest.d \
proxy-header.d \
proxy-insecure.d \
proxy-key-type.d \
proxy-key.d \
proxy-negotiate.d \
proxy-ntlm.d \
proxy-pass.d \
proxy-pinnedpubkey.d \
proxy-service-name.d \
proxy-ssl-allow-beast.d \
proxy-tls13-ciphers.d \
proxy-tlsauthtype.d \
proxy-tlspassword.d \
proxy-tlsuser.d \
proxy-tlsv1.d \
proxy-user.d \
proxy.d \
proxy1.0.d \
proxytunnel.d \
pubkey.d quote.d \
random-file.d \
range.d raw.d \
referer.d \
remote-header-name.d \
remote-name-all.d \
remote-name.d \
remote-time.d \
request-target.d \
request.d \
resolve.d \
retry-connrefused.d \
retry-delay.d \
retry-max-time.d \
retry.d \
sasl-authzid.d \
sasl-ir.d \
service-name.d \
show-error.d \
silent.d \
socks4.d socks5.d \
socks4a.d \
socks5-basic.d \
socks5-gssapi-nec.d \
socks5-gssapi-service.d \
socks5-gssapi.d \
socks5-hostname.d \
speed-limit.d \
speed-time.d \
ssl-allow-beast.d \
ssl-no-revoke.d \
ssl-reqd.d \
ssl.d \
sslv2.d sslv3.d \
stderr.d \
styled-output.d \
suppress-connect-headers.d \
tcp-fastopen.d \
tcp-nodelay.d \
telnet-option.d \
tftp-blksize.d \
tftp-no-options.d \
time-cond.d \
tls-max.d \
tls13-ciphers.d \
tlsauthtype.d \
tlspassword.d \
tlsuser.d \
tlsv1.0.d \
tlsv1.1.d \
tlsv1.2.d \
tlsv1.3.d tlsv1.d \
tr-encoding.d \
trace-ascii.d \
trace-time.d \
trace.d \
unix-socket.d \
upload-file.d \
url.d use-ascii.d \
user-agent.d \
user.d verbose.d \
version.d \
write-out.d \
xattr.d
OTHERPAGES = page-footer page-header

View file

@ -0,0 +1,17 @@
Long: alt-svc
Arg: <file name>
Protocols: HTTPS
Help: Enable alt-svc with this cache file
Added: 7.64.1
---
WARNING: this option is experimental. Do not use in production.
This option enables the alt-svc parser in curl. If the file name points to an
existing alt-svc cache file, that will be used. After a completed transfer,
the cache will be saved to the file name again if it has been modified.
Specify a "" file name (zero length) to avoid loading/saving and make curl
just handle the cache in memory.
If this option is used several times, curl will load contents from all the
files but the last one will be used for saving.
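An illustrative invocation (cache file name and URL are placeholders):
 curl --alt-svc alt-svc-cache.txt https://example.com/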

View file

@ -25,9 +25,9 @@ should not be set. If the option is not set, then curl will use the
certificates in the system and user Keychain to verify the peer, which is the
preferred method of verifying the peer's certificate chain.
(Schannel/WinSSL only) This option is supported for WinSSL in Windows 7 or
later with libcurl 7.60 or later. This option is supported for backward
compatibility with other SSL engines; instead it is recommended to use Windows'
store of root certificates (the default for WinSSL).
(Schannel only) This option is supported for Schannel in Windows 7 or later with
libcurl 7.60 or later. This option is supported for backward compatibility
with other SSL engines; instead it is recommended to use Windows' store of
root certificates (the default for Schannel).
If this option is used several times, the last one will be used.

View file

@ -36,7 +36,7 @@ system or user keychain, or the path to a PKCS#12-encoded certificate and
private key. If you want to use a file from the current directory, please
precede it with "./" prefix, in order to avoid confusion with a nickname.
(Schannel/WinSSL only) Client certificates must be specified by a path
(Schannel only) Client certificates must be specified by a path
expression to a certificate store. (Loading PFX is not supported; you can
import it to a store first). You can use
"<store location>\\<store name>\\<thumbprint>" to refer to a certificate

View file

@ -15,12 +15,12 @@ if so, the colon or equals characters can be used as separators. If the option
is specified with one or two dashes, there can be no colon or equals character
between the option and its parameter.
If the parameter is to contain whitespace, the parameter must be enclosed
within quotes. Within double quotes, the following escape sequences are
available: \\\\, \\", \\t, \\n, \\r and \\v. A backslash preceding any other
letter is ignored. If the first column of a config line is a '#' character,
the rest of the line will be treated as a comment. Only write one option per
physical line in the config file.
If the parameter contains whitespace (or starts with : or =), the parameter
must be enclosed within quotes. Within double quotes, the following escape
sequences are available: \\\\, \\", \\t, \\n, \\r and \\v. A backslash
preceding any other letter is ignored. If the first column of a config line is
a '#' character, the rest of the line will be treated as a comment. Only write
one option per physical line in the config file.
Specify the filename to --config as '-' to make curl read the file from stdin.
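A short illustrative config file following these rules (the option values are
placeholders):
 # example config fragment
 user-agent = "example agent/1.0"
 --retry 3
 url = "https://example.com/"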
@ -40,7 +40,7 @@ Unix-like systems (which returns the home dir given the current user in your
system). On Windows, it then checks for the APPDATA variable, or as a last
resort the '%USERPROFILE%\\Application Data'.
2) On windows, if there is no _curlrc file in the home dir, it checks for one
2) On windows, if there is no .curlrc file in the home dir, it checks for one
in the same dir the curl executable is placed. On Unix-like systems, it will
simply try to load .curlrc from the determined home dir.

View file

@ -1,6 +1,6 @@
Short: b
Long: cookie
Arg: <data>
Arg: <data|filename>
Protocols: HTTP
Help: Send cookies from string/file
---

View file

@ -24,7 +24,7 @@ chunk that looks like \&'name=daniel&skill=lousy'.
If you start the data with the letter @, the rest should be a file name to
read the data from, or - if you want curl to read the data from
stdin. Multiple files can also be specified. Posting data from a file named
'foobar' would thus be done with --data @foobar. When --data is told to read
\&'foobar' would thus be done with --data @foobar. When --data is told to read
from a file like that, carriage returns and newlines will be stripped out. If
you don't want the @ character to have a special interpretation use --data-raw
instead.

View file

@ -2,6 +2,7 @@ Long: doh-url
Arg: <URL>
Help: Resolve host names over DOH
Protocols: all
Added: 7.62.0
---
Specifies which DNS-over-HTTPS (DOH) server to use to resolve hostnames,
instead of using the default name resolver mechanism. The URL must be HTTPS.

View file

@ -12,6 +12,8 @@ site sends to you. Cookies from the headers could then be read in a second
curl invocation by using the --cookie option! The --cookie-jar option is a
better way to store cookies.
If no headers are received, the use of this option will create an empty file.
When used in FTP, the FTP server response lines are considered to be
"headers" and thus are saved there.

View file

@ -0,0 +1,13 @@
Long: http0.9
Tags: Versions
Protocols: HTTP
Added:
Help: Allow HTTP 0.9 responses
---
Tells curl to be fine with an HTTP version 0.9 response.
HTTP/0.9 is a completely headerless response and therefore you can also
connect with this to non-HTTP servers and still get a response since curl will
simply transparently downgrade - if allowed.
Since curl 7.66.0, HTTP/0.9 is disabled by default.
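An illustrative invocation against a legacy server (URL is a placeholder):
 curl --http0.9 http://legacy.example/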

View file

@ -6,5 +6,6 @@ Mutexed: http1.1 http1.0 http2-prior-knowledge
Requires: HTTP/2
See-also: no-alpn
Help: Use HTTP 2
See-also: http1.1 http3
---
Tells curl to use HTTP version 2.

View file

@ -0,0 +1,19 @@
Long: http3
Tags: Versions
Protocols: HTTP
Added: 7.66.0
Mutexed: http1.1 http1.0 http2 http2-prior-knowledge
Requires: HTTP/3
Help: Use HTTP v3
See-also: http1.1 http2
---
WARNING: this option is experimental. Do not use in production.
Tells curl to use HTTP version 3 directly to the host and port number used in
the URL. A normal HTTP/3 transaction will be done to a host and then get
redirected via Alt-Svc, but this option allows a user to circumvent that
when you know that the target speaks HTTP/3 on the given host and port.
This option will make curl fail if a QUIC connection cannot be established;
it cannot fall back to a lower HTTP version on its own.
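An illustrative invocation against a host known to speak HTTP/3 on that port
(URL is a placeholder):
 curl --http3 https://example.com/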

View file

@ -5,7 +5,7 @@ Help: Private key file name
---
Private key file name. Allows you to provide your private key in this separate
file. For SSH, if not specified, curl tries the following candidates in order:
'~/.ssh/id_rsa', '~/.ssh/id_dsa', './id_rsa', './id_dsa'.
\&'~/.ssh/id_rsa', '~/.ssh/id_dsa', './id_rsa', './id_dsa'.
If curl is built against OpenSSL library, and the engine pkcs11 is available,
then a PKCS#11 URI (RFC 7512) can be used to specify a private key located in a

View file

@ -4,8 +4,7 @@ Help: Maximum number of redirects allowed
Protocols: HTTP
---
Set maximum number of redirection-followings allowed. When --location is used,
is used to prevent curl from following redirections \&"in absurdum". By
default, the limit is set to 50 redirections. Set this option to -1 to make it
unlimited.
this option is used to prevent curl from following redirections too much. By
default, the limit is set to 50 redirections. Set this option to -1 to make
it unlimited.
If this option is used several times, the last one will be used.

View file

@ -0,0 +1,10 @@
Long: no-progress-meter
Help: Do not show the progress meter
See-also: verbose silent
Added: 7.67.0
---
Option to switch off the progress meter output without muting or otherwise
affecting warning and informational messages like --silent does.
Note that this is the negated form of the option that is documented. You can
thus use --progress-meter to enable the progress meter again.

View file

@ -0,0 +1,9 @@
Long: parallel-max
Help: Maximum concurrency for parallel transfers
Added: 7.66.0
See-also: parallel
---
When asked to do parallel transfers, using --parallel, this option controls
the maximum number of transfers done simultaneously.
The default is 50.
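An illustrative invocation (URL range is a placeholder):
 curl --parallel --parallel-max 10 -O "https://example.com/file[1-100].txt"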

View file

@ -0,0 +1,7 @@
Short: Z
Long: parallel
Help: Perform transfers in parallel
Added: 7.66.0
---
Makes curl perform its transfers in parallel as compared to the regular serial
manner.

View file

@ -15,13 +15,11 @@ abort the connection before sending or receiving any data.
PEM/DER support:
7.39.0: OpenSSL, GnuTLS and GSKit
7.43.0: NSS and wolfSSL/CyaSSL
7.43.0: NSS and wolfSSL
7.47.0: mbedtls
7.49.0: PolarSSL
sha256 support:
7.44.0: OpenSSL, GnuTLS, NSS and wolfSSL/CyaSSL.
7.44.0: OpenSSL, GnuTLS, NSS and wolfSSL
7.47.0: mbedtls
7.49.0: PolarSSL
Other SSL backends not supported.
If this option is used several times, the last one will be used.

View file

@ -11,7 +11,8 @@ Example, allow only HTTP and HTTPS on redirect:
curl --proto-redir -all,http,https http://example.com
By default curl will allow all protocols on redirect except several disabled
for security reasons: Since 7.19.4 FILE and SCP are disabled, and since 7.40.0
SMB and SMBS are also disabled. Specifying \fIall\fP or \fI+all\fP enables all
protocols on redirect, including those disabled for security.
By default curl will allow HTTP, HTTPS, FTP and FTPS on redirect (7.65.2).
Older versions of curl allowed all protocols on redirect except several
disabled for security reasons: Since 7.19.4 FILE and SCP are disabled, and
since 7.40.0 SMB and SMBS are also disabled. Specifying \fIall\fP or \fI+all\fP
enables all protocols on redirect, including those disabled for security.

View file

@ -6,7 +6,7 @@ Added: 7.20.2
---
Tells curl to limit what protocols it may use in the transfer. Protocols are
evaluated left to right, are comma separated, and are each a protocol name or
'all', optionally prefixed by zero or more modifiers. Available modifiers are:
\&'all', optionally prefixed by zero or more modifiers. Available modifiers are:
.RS
.TP 3
.B +

View file

@ -9,4 +9,8 @@ ciphers. Read up on TLS 1.3 cipher suite details on this URL:
https://curl.haxx.se/docs/ssl-ciphers.html
This option is currently used only when curl is built to use OpenSSL 1.1.1 or
later. If you are using a different SSL backend you can try setting TLS 1.3
cipher suites by using the --proxy-ciphers option.
If this option is used several times, the last one will be used.

View file

@ -9,4 +9,10 @@ If you use a Windows SSPI-enabled curl binary and do either Negotiate or NTLM
authentication then you can tell curl to select the user name and password
from your environment by specifying a single colon with this option: "-U :".
On systems where it works, curl will hide the given option argument from
process listings. This is not enough to protect credentials from possibly
getting seen by other users on the same system as they will still be visible
for a brief moment before cleared. Such sensitive data should be retrieved
from a file instead or similar and never used in clear text in a command line.
If this option is used several times, the last one will be used.

View file

@ -3,11 +3,10 @@ Short: p
Help: Operate through an HTTP proxy tunnel (using CONNECT)
See-also: proxy
---
When an HTTP proxy is used --proxy, this option will cause non-HTTP protocols
to attempt to tunnel through the proxy instead of merely using it to do
HTTP-like operations. The tunnel approach is made with the HTTP proxy CONNECT
request and requires that the proxy allows direct connect to the remote port
number curl wants to tunnel through to.
When an HTTP proxy is used --proxy, this option will make curl tunnel through
the proxy. The tunnel approach is made with the HTTP proxy CONNECT request and
requires that the proxy allows direct connect to the remote port number curl
wants to tunnel through to.
To suppress proxy CONNECT response headers when curl is set to output headers
use --suppress-connect-headers.

View file

@ -16,9 +16,10 @@ If the server returns failure for one of the commands, the entire operation
will be aborted. You must send syntactically correct FTP commands as RFC 959
defines to FTP servers, or one of the commands listed below to SFTP servers.
This option can be used multiple times. When speaking to an FTP server, prefix
the command with an asterisk (*) to make curl continue even if the command
fails as by default curl will stop at first failure.
Prefix the command with an asterisk (*) to make curl continue even if the
command fails as by default curl will stop at first failure.
This option can be used multiple times.
SFTP is a binary protocol. Unlike for FTP, curl interprets SFTP quote commands
itself before sending them to the server. File names may be quoted

View file

@ -11,6 +11,10 @@ the number used for the specific protocol the host will be used for. It means
you need several entries if you want to provide address for the same host but
different ports.
By specifying '*' as host you can tell curl to resolve any host and specific
port pair to the specified address. Wildcard is resolved last so any --resolve
with a specific host and port will be used first.
The provided address set by this option will be used even if --ipv4 or --ipv6
is set to make curl use another IP version.
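An illustrative combination (addresses are placeholders): the specific entry
wins for example.com and the wildcard catches every other host on port 443:
 curl --resolve 'example.com:443:192.0.2.1' --resolve '*:443:127.0.0.1' https://example.com/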
@ -18,4 +22,6 @@ Support for providing the IP address within [brackets] was added in 7.57.0.
Support for providing multiple IP addresses per entry was added in 7.59.0.
Support for resolving with wildcard was added in 7.64.0.
This option can be used many times to add many host names to resolve.

View file

@ -14,4 +14,7 @@ for all forthcoming retries it will double the waiting time until it reaches
using --retry-delay you disable this exponential backoff algorithm. See also
--retry-max-time to limit the total time allowed for retries.
Since curl 7.66.0, curl will comply with the Retry-After: response header if
one is present, to know when to issue the next retry.
If this option is used several times, the last one will be used.

View file

@ -0,0 +1,11 @@
Long: sasl-authzid
Help: Use this identity to act as during SASL PLAIN authentication
Added: 7.66.0
---
Use this authorisation identity (authzid), during SASL PLAIN authentication,
in addition to the authentication identity (authcid) as specified by --user.
If the option isn't specified, the server will derive the authzid from the
authcid, but if specified, and depending on the server implementation, it may
be used to access another user's inbox that the user has been granted access
to, or a shared mailbox, for example.
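An illustrative invocation (names and server are placeholders):
 curl --sasl-authzid shared-mailbox --login-options AUTH=PLAIN -u 'alice:secret' imap://mail.example.com/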

View file

@ -1,7 +1,7 @@
Long: ssl-no-revoke
Help: Disable cert revocation checks (WinSSL)
Help: Disable cert revocation checks (Schannel)
Added: 7.44.0
---
(WinSSL) This option tells curl to disable certificate revocation checks.
(Schannel) This option tells curl to disable certificate revocation checks.
WARNING: this option loosens the SSL security, and by using this flag you ask
for exactly that.

View file

@ -4,11 +4,11 @@ Tags: Versions
Protocols: SSL
Added: 7.54.0
Requires: TLS
See-also: tlsv1.0 tlsv1.1 tlsv1.2
Help: Use TLSv1.0 or greater
See-also: tlsv1.0 tlsv1.1 tlsv1.2 tlsv1.3
Help: Set maximum allowed TLS version
---
VERSION defines maximum supported TLS version. A minimum is defined
by arguments tlsv1.0 or tlsv1.1 or tlsv1.2.
VERSION defines maximum supported TLS version. The minimum acceptable version
is set by tlsv1.0, tlsv1.1, tlsv1.2 or tlsv1.3.
.RS
.IP "default"

View file

@ -9,4 +9,8 @@ cipher suite details on this URL:
https://curl.haxx.se/docs/ssl-ciphers.html
This option is currently used only when curl is built to use OpenSSL 1.1.1 or
later. If you are using a different SSL backend you can try setting TLS 1.3
cipher suites by using the --ciphers option.
If this option is used several times, the last one will be used.

View file

@ -4,3 +4,7 @@ Protocols: TLS
Added: 7.34.0
---
Forces curl to use TLS version 1.0 or later when connecting to a remote TLS server.
In old versions of curl this option was documented to allow _only_ TLS 1.0,
but behavior was inconsistent depending on the TLS library. Use --tls-max if
you want to set a maximum TLS version.

View file

@ -4,3 +4,7 @@ Protocols: TLS
Added: 7.34.0
---
Forces curl to use TLS version 1.1 or later when connecting to a remote TLS server.
In old versions of curl this option was documented to allow _only_ TLS 1.1,
but behavior was inconsistent depending on the TLS library. Use --tls-max if
you want to set a maximum TLS version.

View file

@ -4,3 +4,7 @@ Protocols: TLS
Added: 7.34.0
---
Forces curl to use TLS version 1.2 or later when connecting to a remote TLS server.
In old versions of curl this option was documented to allow _only_ TLS 1.2,
but behavior was inconsistent depending on the TLS library. Use --tls-max if
you want to set a maximum TLS version.

View file

The user name and password are split up on the first colon, which makes it
impossible to use a colon in the user name with this option. The password,
however, still can.
On systems where it works, curl will hide the given option argument from
process listings. This is not enough to protect credentials from possibly
getting seen by other users on the same system as they will still be visible
for a brief moment before being cleared. Such sensitive data should be retrieved
from a file instead or similar and never used in clear text in a command line.
When using Kerberos V5 with a Windows based server you should include the
Windows domain name in the user name, in order for the server to successfully
obtain a Kerberos Ticket. If you don't then the initial authentication

View file

@ -55,4 +55,6 @@ there are errors (such as the file or server not being available).
.IP "PSL"
PSL is short for Public Suffix List and means that this curl has been built
with knowledge about "public suffixes".
.IP "MultiSSL"
This curl supports multiple TLS backends.
.RE

View file

@ -20,7 +20,7 @@
.\" *
.\" **************************************************************************
.\"
.TH curl-config 1 "November 30, 2017" "Curl 7.63.0" "curl-config manual"
.TH curl-config 1 "November 30, 2017" "Curl 7.67.0" "curl-config manual"
.SH NAME
curl-config \- Get information about a libcurl installation

View file

@ -22,7 +22,7 @@
.\"
.\" DO NOT EDIT. Generated by the curl project gen.pl man page generator.
.\"
.TH curl 1 "November 16, 2016" "Curl 7.63.0" "Curl Manual"
.TH curl 1 "November 16, 2016" "Curl 7.67.0" "Curl Manual"
.SH NAME
curl \- transfer a URL
@ -146,6 +146,20 @@ Note: netstat shows the path of an abstract socket prefixed with '@', however
the <path> argument should not have this leading character.
Added in 7.53.0.
.IP "--alt-svc <file name>"
(HTTPS) WARNING: this option is experimental. Do not use in production.
This option enables the alt-svc parser in curl. If the file name points to an
existing alt-svc cache file, that will be used. After a completed transfer,
the cache will be saved to the file name again if it has been modified.
Specify a "" file name (zero length) to avoid loading/saving and make curl
just handle the cache in memory.
If this option is used several times, curl will load contents from all the
files but the last one will be used for saving.
Added in 7.64.1.
.IP "--anyauth"
(HTTP) Tells curl to figure out authentication method by itself, and use the most
secure one the remote site claims to support. This is done by first doing a
@ -197,10 +211,10 @@ should not be set. If the option is not set, then curl will use the
certificates in the system and user Keychain to verify the peer, which is the
preferred method of verifying the peer's certificate chain.
(Schannel/WinSSL only) This option is supported for WinSSL in Windows 7 or
later with libcurl 7.60 or later. This option is supported for backward
compatibility with other SSL engines; instead it is recommended to use Windows'
store of root certificates (the default for WinSSL).
(Schannel only) This option is supported for Schannel in Windows 7 or later with
libcurl 7.60 or later. This option is supported for backward compatibility
with other SSL engines; instead it is recommended to use Windows' store of
root certificates (the default for Schannel).
If this option is used several times, the last one will be used.
.IP "--capath <dir>"
@ -264,7 +278,7 @@ system or user keychain, or the path to a PKCS#12-encoded certificate and
private key. If you want to use a file from the current directory, please
precede it with "./" prefix, in order to avoid confusion with a nickname.
(Schannel/WinSSL only) Client certificates must be specified by a path
(Schannel only) Client certificates must be specified by a path
expression to a certificate store. (Loading PFX is not supported; you can
import it to a store first). You can use
"<store location>\\<store name>\\<thumbprint>" to refer to a certificate
@ -307,12 +321,12 @@ if so, the colon or equals characters can be used as separators. If the option
is specified with one or two dashes, there can be no colon or equals character
between the option and its parameter.
If the parameter is to contain whitespace, the parameter must be enclosed
within quotes. Within double quotes, the following escape sequences are
available: \\\\, \\", \\t, \\n, \\r and \\v. A backslash preceding any other
letter is ignored. If the first column of a config line is a '#' character,
the rest of the line will be treated as a comment. Only write one option per
physical line in the config file.
If the parameter contains whitespace (or starts with : or =), the parameter
must be enclosed within quotes. Within double quotes, the following escape
sequences are available: \\\\, \\", \\t, \\n, \\r and \\v. A backslash
preceding any other letter is ignored. If the first column of a config line is
a '#' character, the rest of the line will be treated as a comment. Only write
one option per physical line in the config file.
Specify the filename to \fI-K, --config\fP as '-' to make curl read the file from stdin.
@ -332,7 +346,7 @@ Unix-like systems (which returns the home dir given the current user in your
system). On Windows, it then checks for the APPDATA variable, or as a last
resort the '%USERPROFILE%\\Application Data'.
2) On windows, if there is no _curlrc file in the home dir, it checks for one
2) On windows, if there is no .curlrc file in the home dir, it checks for one
in the same dir the curl executable is placed. On Unix-like systems, it will
simply try to load .curlrc from the determined home dir.
@ -409,7 +423,7 @@ lethal situation.
If this option is used several times, the last specified file name will be
used.
.IP "-b, --cookie <data>"
.IP "-b, --cookie <data|filename>"
(HTTP) Pass the data to the HTTP server in the Cookie header. It is supposedly
the data previously received from the server in a "Set-Cookie:" line. The
data should be in the format "NAME1=VALUE1; NAME2=VALUE2".
@ -528,7 +542,7 @@ chunk that looks like \&'name=daniel&skill=lousy'.
If you start the data with the letter @, the rest should be a file name to
read the data from, or - if you want curl to read the data from
stdin. Multiple files can also be specified. Posting data from a file named
'foobar' would thus be done with \fI-d, --data\fP @foobar. When --data is told to read
\&'foobar' would thus be done with \fI-d, --data\fP @foobar. When --data is told to read
from a file like that, carriage returns and newlines will be stripped out. If
you don't want the @ character to have a special interpretation use \fI--data-raw\fP
instead.
@ -621,6 +635,8 @@ address.
instead of using the default name resolver mechanism. The URL must be HTTPS.
If this option is used several times, the last one will be used.
Added in 7.62.0.
.IP "-D, --dump-header <filename>"
(HTTP FTP) Write the received protocol headers to the specified file.
@ -629,6 +645,8 @@ site sends to you. Cookies from the headers could then be read in a second
curl invocation by using the \fI-b, --cookie\fP option! The \fI-c, --cookie-jar\fP option is a
better way to store cookies.
If no headers are received, the use of this option will create an empty file.
When used in FTP, the FTP server response lines are considered to be "headers"
and thus are saved there.
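In libcurl terms this roughly maps to pointing CURLOPT_HEADERDATA at a FILE * (a sketch with an assumed headers.txt output file); with no header callback installed, received headers are written to that stream.
#include <stdio.h>
#include <curl/curl.h>
int main(void)
{
  FILE *hdrs = fopen("headers.txt", "wb");
  CURL *curl = curl_easy_init();
  if(curl && hdrs) {
    curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/");
    /* without CURLOPT_HEADERFUNCTION, headers are fwrite()n here,
       much like -D headers.txt */
    curl_easy_setopt(curl, CURLOPT_HEADERDATA, hdrs);
    curl_easy_perform(curl);
    curl_easy_cleanup(curl);
  }
  if(hdrs)
    fclose(hdrs);
  return 0;
}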
@ -1037,6 +1055,14 @@ be the 128 bit MD5 checksum of the remote host's public key, curl will refuse
the connection with the host unless the md5sums match.
Added in 7.17.1.
.IP "--http0.9"
(HTTP) Tells curl to be fine with HTTP version 0.9 response.
HTTP/0.9 is a completely headerless response and therefore you can also
connect with this to non-HTTP servers and still get a response since curl will
simply transparently downgrade - if allowed.
Since curl 7.66.0, HTTP/0.9 is disabled by default.
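The corresponding libcurl knob is CURLOPT_HTTP09_ALLOWED (available since 7.64.0); a minimal sketch against a placeholder server:
#include <curl/curl.h>
int main(void)
{
  CURL *curl = curl_easy_init();
  if(curl) {
    curl_easy_setopt(curl, CURLOPT_URL, "http://example.com/");
    /* accept a headerless HTTP/0.9 style response; since 7.66.0 such
       responses are refused unless explicitly allowed */
    curl_easy_setopt(curl, CURLOPT_HTTP09_ALLOWED, 1L);
    curl_easy_perform(curl);
    curl_easy_cleanup(curl);
  }
  return 0;
}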
.IP "-0, --http1.0"
(HTTP) Tells curl to use HTTP version 1.0 instead of using its internally preferred
HTTP version.
@ -1056,7 +1082,20 @@ protocol version in the TLS handshake.
.IP "--http2"
(HTTP) Tells curl to use HTTP version 2.
See also \fI--no-alpn\fP. \fI--http2\fP requires that the underlying libcurl was built to support HTTP/2. This option overrides \fI--http1.1\fP and \fI-0, --http1.0\fP and \fI--http2-prior-knowledge\fP. Added in 7.33.0.
See also \fI--http1.1\fP and \fI--http3\fP. \fI--http2\fP requires that the underlying libcurl was built to support HTTP/2. This option overrides \fI--http1.1\fP and \fI-0, --http1.0\fP and \fI--http2-prior-knowledge\fP. Added in 7.33.0.
.IP "--http3"
(HTTP)
WARNING: this option is experimental. Do not use in production.
Tells curl to use HTTP version 3 directly to the host and port number used in
the URL. A normal HTTP/3 transaction will be done to a host and then get
redirected via Alt-Svc, but this option allows you to circumvent that when
you know that the target speaks HTTP/3 on the given host and port.
This option will make curl fail if a QUIC connection cannot be established; it
cannot fall back to a lower HTTP version on its own.
See also \fI--http1.1\fP and \fI--http2\fP. \fI--http3\fP requires that the underlying libcurl was built to support HTTP/3. This option overrides \fI--http1.1\fP and \fI-0, --http1.0\fP and \fI--http2\fP and \fI--http2-prior-knowledge\fP. Added in 7.66.0.
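A hedged libcurl equivalent, assuming a libcurl built with one of the experimental HTTP/3 backends (the http3.c example this commit adds to docs/examples is the full program):
#include <curl/curl.h>
int main(void)
{
  CURL *curl = curl_easy_init();
  if(curl) {
    curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/");
    /* go straight to HTTP/3 on the URL's host and port, skipping the
       usual Alt-Svc upgrade */
    curl_easy_setopt(curl, CURLOPT_HTTP_VERSION, (long)CURL_HTTP_VERSION_3);
    curl_easy_perform(curl);
    curl_easy_cleanup(curl);
  }
  return 0;
}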
.IP "--ignore-content-length"
(FTP HTTP) For HTTP, Ignore the Content-Length header. This is particularly useful for
servers running Apache 1.x, which will report incorrect Content-Length for
@ -1135,7 +1174,7 @@ If this option is used several times, the last one will be used.
.IP "--key <key>"
(TLS SSH) Private key file name. Allows you to provide your private key in this separate
file. For SSH, if not specified, curl tries the following candidates in order:
'~/.ssh/id_rsa', '~/.ssh/id_dsa', './id_rsa', './id_dsa'.
\&'~/.ssh/id_rsa', '~/.ssh/id_dsa', './id_rsa', './id_dsa'.
If curl is built against OpenSSL library, and the engine pkcs11 is available,
then a PKCS#11 URI (RFC 7512) can be used to specify a private key located in a
@ -1286,9 +1325,8 @@ than this given limit. This concerns both FTP and HTTP transfers.
See also \fI--limit-rate\fP.
.IP "--max-redirs <num>"
(HTTP) Set maximum number of redirection-followings allowed. When \fI-L, --location\fP is used,
is used to prevent curl from following redirections \&"in absurdum". By
default, the limit is set to 50 redirections. Set this option to -1 to make it
unlimited.
is used to prevent curl from following redirections too much. By default, the
limit is set to 50 redirections. Set this option to -1 to make it unlimited.
If this option is used several times, the last one will be used.
.IP "-m, --max-time <seconds>"
@ -1407,6 +1445,14 @@ with an SSL library that supports NPN. NPN is used by a libcurl that supports
HTTP/2 to negotiate HTTP/2 support with the server during https sessions.
See also \fI--no-alpn\fP and \fI--http2\fP. \fI--no-npn\fP requires that the underlying libcurl was built to support TLS. Added in 7.36.0.
.IP "--no-progress-meter"
Option to switch off the progress meter output without muting or otherwise
affecting warning and informational messages like \fI-s, --silent\fP does.
Note that this is the negated option name documented. You can thus use
--progress-meter to enable the progress meter again.
See also \fI-v, --verbose\fP and \fI-s, --silent\fP. Added in 7.67.0.
.IP "--no-sessionid"
(TLS) Disable curl's use of SSL session-ID caching. By default all transfers are
done using the cache. Note that while nothing should ever get hurt by
@ -1487,6 +1533,18 @@ dynamically. Specifying the output as '-' (a single dash) will force the
output to be done to stdout.
See also \fI-O, --remote-name\fP and \fI--remote-name-all\fP and \fI-J, --remote-header-name\fP.
.IP "--parallel-max"
When asked to do parallel transfers, using \fI-Z, --parallel\fP, this option controls
the maximum number of transfers to do simultaneously.
The default is 50.
See also \fI-Z, --parallel\fP. Added in 7.66.0.
.IP "-Z, --parallel"
Makes curl perform its transfers in parallel as compared to the regular serial
manner.
Added in 7.66.0.
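With the library, parallelism comes from the multi interface rather than a single option; a rough sketch of capping concurrency there is below (the reworked 10-at-a-time.c example further down in this commit shows the complete transfer loop). CURLMOPT_MAX_TOTAL_CONNECTIONS is the assumed cap here.
#include <curl/curl.h>
int main(void)
{
  CURLM *multi;
  int running = 0;
  curl_global_init(CURL_GLOBAL_ALL);
  multi = curl_multi_init();
  /* cap how many connections may be open at the same time */
  curl_multi_setopt(multi, CURLMOPT_MAX_TOTAL_CONNECTIONS, 50L);
  /* ... add one easy handle per URL with curl_multi_add_handle() ... */
  do {
    curl_multi_perform(multi, &running);
    curl_multi_wait(multi, NULL, 0, 1000, NULL);
  } while(running);
  curl_multi_cleanup(multi);
  curl_global_cleanup();
  return 0;
}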
.IP "--pass <phrase>"
(SSH TLS) Passphrase for the private key
@ -1510,13 +1568,11 @@ abort the connection before sending or receiving any data.
PEM/DER support:
7.39.0: OpenSSL, GnuTLS and GSKit
7.43.0: NSS and wolfSSL/CyaSSL
7.43.0: NSS and wolfSSL
7.47.0: mbedtls
7.49.0: PolarSSL
sha256 support:
7.44.0: OpenSSL, GnuTLS, NSS and wolfSSL/CyaSSL.
7.44.0: OpenSSL, GnuTLS, NSS and wolfSSL
7.47.0: mbedtls
7.49.0: PolarSSL
Other SSL backends not supported.
If this option is used several times, the last one will be used.
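The libcurl counterpart is CURLOPT_PINNEDPUBLICKEY; a sketch with a made-up sha256 pin value, not a real one:
#include <curl/curl.h>
int main(void)
{
  CURL *curl = curl_easy_init();
  if(curl) {
    curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/");
    /* either a path to a PEM/DER public key file or a sha256// base64
       hash; this hash is only a placeholder */
    curl_easy_setopt(curl, CURLOPT_PINNEDPUBLICKEY,
                     "sha256//AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=");
    curl_easy_perform(curl);
    curl_easy_cleanup(curl);
  }
  return 0;
}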
@ -1597,16 +1653,17 @@ Example, allow only HTTP and HTTPS on redirect:
curl --proto-redir -all,http,https http://example.com
By default curl will allow all protocols on redirect except several disabled
for security reasons: Since 7.19.4 FILE and SCP are disabled, and since 7.40.0
SMB and SMBS are also disabled. Specifying \fIall\fP or \fI+all\fP enables all
protocols on redirect, including those disabled for security.
By default curl will allow HTTP, HTTPS, FTP and FTPS on redirect (7.65.2).
Older versions of curl allowed all protocols on redirect except several
disabled for security reasons: Since 7.19.4 FILE and SCP are disabled, and
since 7.40.0 SMB and SMBS are also disabled. Specifying \fIall\fP or \fI+all\fP
enables all protocols on redirect, including those disabled for security.
Added in 7.20.2.
.IP "--proto <protocols>"
Tells curl to limit what protocols it may use in the transfer. Protocols are
evaluated left to right, are comma separated, and are each a protocol name or
'all', optionally prefixed by zero or more modifiers. Available modifiers are:
\&'all', optionally prefixed by zero or more modifiers. Available modifiers are:
.RS
.TP 3
.B +
@ -1756,6 +1813,10 @@ ciphers. Read up on TLS 1.3 cipher suite details on this URL:
https://curl.haxx.se/docs/ssl-ciphers.html
This option is currently used only when curl is built to use OpenSSL 1.1.1 or
later. If you are using a different SSL backend you can try setting TLS 1.3
cipher suites by using the \fI--proxy-ciphers\fP option.
If this option is used several times, the last one will be used.
.IP "--proxy-tlsauthtype <type>"
Same as \fI--tlsauthtype\fP but used in HTTPS proxy context.
@ -1780,6 +1841,12 @@ If you use a Windows SSPI-enabled curl binary and do either Negotiate or NTLM
authentication then you can tell curl to select the user name and password
from your environment by specifying a single colon with this option: "-U :".
On systems where it works, curl will hide the given option argument from
process listings. This is not enough to protect credentials from possibly
getting seen by other users on the same system, as they will still be visible
for a brief moment before being cleared. Such sensitive data should be retrieved
from a file or similar instead, and never used in clear text in a command line.
If this option is used several times, the last one will be used.
.IP "-x, --proxy [protocol://]host[:port]"
Use the specified proxy.
@ -1824,11 +1891,10 @@ The only difference between this and the HTTP proxy option \fI-x, --proxy\fP, is
attempts to use CONNECT through the proxy will specify an HTTP 1.0 protocol
instead of the default HTTP 1.1.
.IP "-p, --proxytunnel"
When an HTTP proxy is used \fI-x, --proxy\fP, this option will cause non-HTTP protocols
to attempt to tunnel through the proxy instead of merely using it to do
HTTP-like operations. The tunnel approach is made with the HTTP proxy CONNECT
request and requires that the proxy allows direct connect to the remote port
number curl wants to tunnel through to.
When an HTTP proxy is used \fI-x, --proxy\fP, this option will make curl tunnel through
the proxy. The tunnel approach is made with the HTTP proxy CONNECT request and
requires that the proxy allows direct connect to the remote port number curl
wants to tunnel through to.
To suppress proxy CONNECT response headers when curl is set to output headers
use \fI--suppress-connect-headers\fP.
@ -1858,9 +1924,10 @@ If the server returns failure for one of the commands, the entire operation
will be aborted. You must send syntactically correct FTP commands as RFC 959
defines to FTP servers, or one of the commands listed below to SFTP servers.
This option can be used multiple times. When speaking to an FTP server, prefix
the command with an asterisk (*) to make curl continue even if the command
fails as by default curl will stop at first failure.
Prefix the command with an asterisk (*) to make curl continue even if the
command fails as by default curl will stop at first failure.
This option can be used multiple times.
SFTP is a binary protocol. Unlike for FTP, curl interprets SFTP quote commands
itself before sending them to the server. File names may be quoted
@ -2051,6 +2118,10 @@ the number used for the specific protocol the host will be used for. It means
you need several entries if you want to provide address for the same host but
different ports.
By specifying '*' as host you can tell curl to resolve any host and specific
port pair to the specified address. Wildcard is resolved last so any \fI--resolve\fP
with a specific host and port will be used first.
The provided address set by this option will be used even if \fI-4, --ipv4\fP or \fI-6, --ipv6\fP
is set to make curl use another IP version.
@ -2058,6 +2129,8 @@ Support for providing the IP address within [brackets] was added in 7.57.0.
Support for providing multiple IP addresses per entry was added in 7.59.0.
Support for resolving with wildcard was added in 7.64.0.
This option can be used many times to add many host names to resolve.
Added in 7.21.3.
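The same pre-provided resolve entries can be handed to libcurl through CURLOPT_RESOLVE; a sketch using a placeholder host and address:
#include <curl/curl.h>
int main(void)
{
  CURL *curl = curl_easy_init();
  if(curl) {
    /* "host:port:address" entries, same syntax as --resolve */
    struct curl_slist *resolve =
      curl_slist_append(NULL, "example.com:443:127.0.0.1");
    curl_easy_setopt(curl, CURLOPT_RESOLVE, resolve);
    curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/");
    curl_easy_perform(curl);
    curl_easy_cleanup(curl);
    curl_slist_free_all(resolve);
  }
  return 0;
}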
@ -2098,9 +2171,22 @@ for all forthcoming retries it will double the waiting time until it reaches
using \fI--retry-delay\fP you disable this exponential backoff algorithm. See also
\fI--retry-max-time\fP to limit the total time allowed for retries.
Since curl 7.66.0, curl will comply with the Retry-After: response header if
one was present to know when to issue the next retry.
If this option is used several times, the last one will be used.
Added in 7.12.3.
.IP "--sasl-authzid"
Use this authorisation identity (authzid), during SASL PLAIN authentication,
in addition to the authentication identity (authcid) as specified by \fI-u, --user\fP.
If the option isn't specified, the server will derive the authzid from the
authcid, but if specified, and depending on the server implementation, it may
be used to access another user's inbox that the user has been granted access
to, or a shared mailbox, for example.
Added in 7.66.0.
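In libcurl this is CURLOPT_SASL_AUTHZID (also new in 7.66.0); a sketch against a placeholder IMAP server with placeholder credentials — the imap-authzid.c, pop3-authzid.c and smtp-authzid.c examples added by this commit show complete programs.
#include <curl/curl.h>
int main(void)
{
  CURL *curl = curl_easy_init();
  if(curl) {
    curl_easy_setopt(curl, CURLOPT_URL, "imap://imap.example.com/INBOX");
    /* authentication identity (authcid) and its password */
    curl_easy_setopt(curl, CURLOPT_USERNAME, "user");
    curl_easy_setopt(curl, CURLOPT_PASSWORD, "secret");
    /* authorisation identity (authzid), e.g. a shared mailbox owner */
    curl_easy_setopt(curl, CURLOPT_SASL_AUTHZID, "shared-mailbox-owner");
    curl_easy_perform(curl);
    curl_easy_cleanup(curl);
  }
  return 0;
}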
.IP "--sasl-ir"
Enable initial response in SASL authentication.
@ -2246,7 +2332,7 @@ this flag you ask for exactly that.
Added in 7.25.0.
.IP "--ssl-no-revoke"
(WinSSL) This option tells curl to disable certificate revocation checks.
(Schannel) This option tells curl to disable certificate revocation checks.
WARNING: this option loosens the SSL security, and by using this flag you ask
for exactly that.
@ -2348,8 +2434,8 @@ than the specified date/time.
If this option is used several times, the last one will be used.
.IP "--tls-max <VERSION>"
(SSL) VERSION defines maximum supported TLS version. A minimum is defined
by arguments tlsv1.0 or tlsv1.1 or tlsv1.2.
(SSL) VERSION defines maximum supported TLS version. The minimum acceptable version
is set by tlsv1.0, tlsv1.1, tlsv1.2 or tlsv1.3.
.RS
.IP "default"
@ -2364,7 +2450,7 @@ Use up to TLSv1.2.
Use up to TLSv1.3.
.RE
See also \fI--tlsv1.0\fP and \fI--tlsv1.1\fP and \fI--tlsv1.2\fP. \fI--tls-max\fP requires that the underlying libcurl was built to support TLS. Added in 7.54.0.
See also \fI--tlsv1.0\fP and \fI--tlsv1.1\fP and \fI--tlsv1.2\fP and \fI--tlsv1.3\fP. \fI--tls-max\fP requires that the underlying libcurl was built to support TLS. Added in 7.54.0.
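With the library, the floor and ceiling are combined into a single CURLOPT_SSLVERSION value; a sketch pinning the range to TLS 1.0 through 1.2 (the CURL_SSLVERSION_MAX_* bits need libcurl 7.54.0 or later):
#include <curl/curl.h>
int main(void)
{
  CURL *curl = curl_easy_init();
  if(curl) {
    curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/");
    /* minimum TLS 1.0, maximum TLS 1.2 */
    curl_easy_setopt(curl, CURLOPT_SSLVERSION,
                     (long)(CURL_SSLVERSION_TLSv1_0 |
                            CURL_SSLVERSION_MAX_TLSv1_2));
    curl_easy_perform(curl);
    curl_easy_cleanup(curl);
  }
  return 0;
}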
.IP "--tls13-ciphers <list of TLS 1.3 ciphersuites>"
(TLS) Specifies which cipher suites to use in the connection if it negotiates TLS
1.3. The list of ciphers suites must specify valid ciphers. Read up on TLS 1.3
@ -2372,6 +2458,10 @@ cipher suite details on this URL:
https://curl.haxx.se/docs/ssl-ciphers.html
This option is currently used only when curl is built to use OpenSSL 1.1.1 or
later. If you are using a different SSL backend you can try setting TLS 1.3
cipher suites by using the \fI--ciphers\fP option.
If this option is used several times, the last one will be used.
.IP "--tlsauthtype <type>"
Set TLS authentication type. Currently, the only supported option is "SRP",
@ -2394,14 +2484,26 @@ Added in 7.21.4.
.IP "--tlsv1.0"
(TLS) Forces curl to use TLS version 1.0 or later when connecting to a remote TLS server.
In old versions of curl this option was documented to allow _only_ TLS 1.0,
but behavior was inconsistent depending on the TLS library. Use \fI--tls-max\fP if
you want to set a maximum TLS version.
Added in 7.34.0.
.IP "--tlsv1.1"
(TLS) Forces curl to use TLS version 1.1 or later when connecting to a remote TLS server.
In old versions of curl this option was documented to allow _only_ TLS 1.1,
but behavior was inconsistent depending on the TLS library. Use \fI--tls-max\fP if
you want to set a maximum TLS version.
Added in 7.34.0.
.IP "--tlsv1.2"
(TLS) Forces curl to use TLS version 1.2 or later when connecting to a remote TLS server.
In old versions of curl this option was documented to allow _only_ TLS 1.2,
but behavior was inconsistent depending on the TLS library. Use \fI--tls-max\fP if
you want to set a maximum TLS version.
Added in 7.34.0.
.IP "--tlsv1.3"
(TLS) Forces curl to use TLS version 1.3 or later when connecting to a remote TLS server.
@ -2512,6 +2614,12 @@ The user name and passwords are split up on the first colon, which makes it
impossible to use a colon in the user name with this option. The password can,
still.
On systems where it works, curl will hide the given option argument from
process listings. This is not enough to protect credentials from possibly
getting seen by other users on the same system, as they will still be visible
for a brief moment before being cleared. Such sensitive data should be retrieved
from a file or similar instead, and never used in clear text in a command line.
When using Kerberos V5 with a Windows based server you should include the
Windows domain name in the user name, in order for the server to successfully
obtain a Kerberos Ticket. If you don't then the initial authentication
@ -2601,6 +2709,8 @@ there are errors (such as the file or server not being available).
.IP "PSL"
PSL is short for Public Suffix List and means that this curl has been built
with knowledge about "public suffixes".
.IP "MultiSSL"
This curl supports multiple TLS backends.
.RE
.IP "-w, --write-out <format>"
Make curl display information on stdout after a completed transfer. The format

View file

@ -5,7 +5,7 @@
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2018, Daniel Stenberg, <daniel@haxx.se>, et al.
* Copyright (C) 1998 - 2019, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
@ -20,10 +20,8 @@
*
***************************************************************************/
/* <DESC>
* Source code using the multi interface to download many
* files, with a capped maximum amount of simultaneous transfers.
* Download many files in parallel, in the same thread.
* </DESC>
* Written by Michael Wallner
*/
#include <errno.h>
@ -32,7 +30,7 @@
#ifndef WIN32
# include <unistd.h>
#endif
#include <curl/multi.h>
#include <curl/curl.h>
static const char *urls[] = {
"https://www.microsoft.com",
@ -84,27 +82,23 @@ static const char *urls[] = {
"https://www.un.org",
};
#define MAX 10 /* number of simultaneous transfers */
#define CNT sizeof(urls)/sizeof(char *) /* total number of transfers to do */
#define MAX_PARALLEL 10 /* number of simultaneous transfers */
#define NUM_URLS sizeof(urls)/sizeof(char *)
static size_t cb(char *d, size_t n, size_t l, void *p)
static size_t write_cb(char *data, size_t n, size_t l, void *userp)
{
/* take care of the data here, ignored in this example */
(void)d;
(void)p;
(void)data;
(void)userp;
return n*l;
}
static void init(CURLM *cm, int i)
static void add_transfer(CURLM *cm, int i)
{
CURL *eh = curl_easy_init();
curl_easy_setopt(eh, CURLOPT_WRITEFUNCTION, cb);
curl_easy_setopt(eh, CURLOPT_HEADER, 0L);
curl_easy_setopt(eh, CURLOPT_WRITEFUNCTION, write_cb);
curl_easy_setopt(eh, CURLOPT_URL, urls[i]);
curl_easy_setopt(eh, CURLOPT_PRIVATE, urls[i]);
curl_easy_setopt(eh, CURLOPT_VERBOSE, 0L);
curl_multi_add_handle(cm, eh);
}
@ -112,64 +106,23 @@ int main(void)
{
CURLM *cm;
CURLMsg *msg;
long L;
unsigned int C = 0;
int M, Q, U = -1;
fd_set R, W, E;
struct timeval T;
unsigned int transfers = 0;
int msgs_left = -1;
int still_alive = 1;
curl_global_init(CURL_GLOBAL_ALL);
cm = curl_multi_init();
/* we can optionally limit the total amount of connections this multi handle
uses */
curl_multi_setopt(cm, CURLMOPT_MAXCONNECTS, (long)MAX);
/* Limit the amount of simultaneous connections curl should allow: */
curl_multi_setopt(cm, CURLMOPT_MAXCONNECTS, (long)MAX_PARALLEL);
for(C = 0; C < MAX; ++C) {
init(cm, C);
}
for(transfers = 0; transfers < MAX_PARALLEL; transfers++)
add_transfer(cm, transfers);
while(U) {
curl_multi_perform(cm, &U);
do {
curl_multi_perform(cm, &still_alive);
if(U) {
FD_ZERO(&R);
FD_ZERO(&W);
FD_ZERO(&E);
if(curl_multi_fdset(cm, &R, &W, &E, &M)) {
fprintf(stderr, "E: curl_multi_fdset\n");
return EXIT_FAILURE;
}
if(curl_multi_timeout(cm, &L)) {
fprintf(stderr, "E: curl_multi_timeout\n");
return EXIT_FAILURE;
}
if(L == -1)
L = 100;
if(M == -1) {
#ifdef WIN32
Sleep(L);
#else
sleep((unsigned int)L / 1000);
#endif
}
else {
T.tv_sec = L/1000;
T.tv_usec = (L%1000)*1000;
if(0 > select(M + 1, &R, &W, &E, &T)) {
fprintf(stderr, "E: select(%i,,,,%li): %i: %s\n",
M + 1, L, errno, strerror(errno));
return EXIT_FAILURE;
}
}
}
while((msg = curl_multi_info_read(cm, &Q))) {
while((msg = curl_multi_info_read(cm, &msgs_left))) {
if(msg->msg == CURLMSG_DONE) {
char *url;
CURL *e = msg->easy_handle;
@ -182,13 +135,13 @@ int main(void)
else {
fprintf(stderr, "E: CURLMsg (%d)\n", msg->msg);
}
if(C < CNT) {
init(cm, C++);
U++; /* just to prevent it from remaining at 0 if there are more
URLs to get */
}
if(transfers < NUM_URLS)
add_transfer(cm, transfers++);
}
}
if(still_alive)
curl_multi_wait(cm, NULL, 0, 1000, NULL);
} while(still_alive || (transfers < NUM_URLS));
curl_multi_cleanup(cm);
curl_global_cleanup();

View file

@ -5,7 +5,7 @@
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) 1998 - 2018, Daniel Stenberg, <daniel@haxx.se>, et al.
# Copyright (C) 1998 - 2019, Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
@ -61,5 +61,10 @@ include Makefile.inc
all: $(check_PROGRAMS)
CHECKSRC = $(CS_$(V))
CS_0 = @echo " RUN " $@;
CS_1 =
CS_ = $(CS_0)
checksrc:
@PERL@ $(top_srcdir)/lib/checksrc.pl -ASNPRINTF $(srcdir)/*.c
$(CHECKSRC)(@PERL@ $(top_srcdir)/lib/checksrc.pl -ASNPRINTF $(srcdir)/*.c)

View file

@ -21,7 +21,7 @@
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) 1998 - 2018, Daniel Stenberg, <daniel@haxx.se>, et al.
# Copyright (C) 1998 - 2019, Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
@ -43,7 +43,7 @@
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) 1998 - 2018, Daniel Stenberg, <daniel@haxx.se>, et al.
# Copyright (C) 1998 - 2019, Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
@ -163,11 +163,12 @@ check_PROGRAMS = 10-at-a-time$(EXEEXT) anyauthput$(EXEEXT) \
sslbackend$(EXEEXT) postit2-formadd$(EXEEXT) \
multi-formadd$(EXEEXT) shared-connection-cache$(EXEEXT) \
sftpuploadresume$(EXEEXT) http2-pushinmemory$(EXEEXT) \
parseurl$(EXEEXT) urlapi$(EXEEXT)
parseurl$(EXEEXT) urlapi$(EXEEXT) imap-authzid$(EXEEXT) \
pop3-authzid$(EXEEXT) smtp-authzid$(EXEEXT) http3$(EXEEXT) \
altsvc$(EXEEXT) http3-present$(EXEEXT)
subdir = docs/examples
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
am__aclocal_m4_deps = $(top_srcdir)/m4/ax_code_coverage.m4 \
$(top_srcdir)/m4/ax_compile_check_sizeof.m4 \
am__aclocal_m4_deps = $(top_srcdir)/m4/ax_compile_check_sizeof.m4 \
$(top_srcdir)/m4/curl-compilers.m4 \
$(top_srcdir)/m4/curl-confopts.m4 \
$(top_srcdir)/m4/curl-functions.m4 \
@ -203,6 +204,13 @@ AM_V_lt = $(am__v_lt_@AM_V@)
am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@)
am__v_lt_0 = --silent
am__v_lt_1 =
altsvc_SOURCES = altsvc.c
altsvc_OBJECTS = altsvc.$(OBJEXT)
altsvc_LDADD = $(LDADD)
@USE_EXPLICIT_LIB_DEPS_FALSE@altsvc_DEPENDENCIES = \
@USE_EXPLICIT_LIB_DEPS_FALSE@ $(LIBDIR)/libcurl.la
@USE_EXPLICIT_LIB_DEPS_TRUE@altsvc_DEPENDENCIES = \
@USE_EXPLICIT_LIB_DEPS_TRUE@ $(LIBDIR)/libcurl.la
anyauthput_SOURCES = anyauthput.c
anyauthput_OBJECTS = anyauthput.$(OBJEXT)
anyauthput_LDADD = $(LDADD)
@ -369,6 +377,19 @@ http2_upload_LDADD = $(LDADD)
@USE_EXPLICIT_LIB_DEPS_FALSE@ $(LIBDIR)/libcurl.la
@USE_EXPLICIT_LIB_DEPS_TRUE@http2_upload_DEPENDENCIES = \
@USE_EXPLICIT_LIB_DEPS_TRUE@ $(LIBDIR)/libcurl.la
http3_SOURCES = http3.c
http3_OBJECTS = http3.$(OBJEXT)
http3_LDADD = $(LDADD)
@USE_EXPLICIT_LIB_DEPS_FALSE@http3_DEPENDENCIES = \
@USE_EXPLICIT_LIB_DEPS_FALSE@ $(LIBDIR)/libcurl.la
@USE_EXPLICIT_LIB_DEPS_TRUE@http3_DEPENDENCIES = $(LIBDIR)/libcurl.la
http3_present_SOURCES = http3-present.c
http3_present_OBJECTS = http3-present.$(OBJEXT)
http3_present_LDADD = $(LDADD)
@USE_EXPLICIT_LIB_DEPS_FALSE@http3_present_DEPENDENCIES = \
@USE_EXPLICIT_LIB_DEPS_FALSE@ $(LIBDIR)/libcurl.la
@USE_EXPLICIT_LIB_DEPS_TRUE@http3_present_DEPENDENCIES = \
@USE_EXPLICIT_LIB_DEPS_TRUE@ $(LIBDIR)/libcurl.la
httpcustomheader_SOURCES = httpcustomheader.c
httpcustomheader_OBJECTS = httpcustomheader.$(OBJEXT)
httpcustomheader_LDADD = $(LDADD)
@ -396,6 +417,13 @@ imap_append_LDADD = $(LDADD)
@USE_EXPLICIT_LIB_DEPS_FALSE@ $(LIBDIR)/libcurl.la
@USE_EXPLICIT_LIB_DEPS_TRUE@imap_append_DEPENDENCIES = \
@USE_EXPLICIT_LIB_DEPS_TRUE@ $(LIBDIR)/libcurl.la
imap_authzid_SOURCES = imap-authzid.c
imap_authzid_OBJECTS = imap-authzid.$(OBJEXT)
imap_authzid_LDADD = $(LDADD)
@USE_EXPLICIT_LIB_DEPS_FALSE@imap_authzid_DEPENDENCIES = \
@USE_EXPLICIT_LIB_DEPS_FALSE@ $(LIBDIR)/libcurl.la
@USE_EXPLICIT_LIB_DEPS_TRUE@imap_authzid_DEPENDENCIES = \
@USE_EXPLICIT_LIB_DEPS_TRUE@ $(LIBDIR)/libcurl.la
imap_copy_SOURCES = imap-copy.c
imap_copy_OBJECTS = imap-copy.$(OBJEXT)
imap_copy_LDADD = $(LDADD)
@ -543,6 +571,13 @@ persistent_LDADD = $(LDADD)
@USE_EXPLICIT_LIB_DEPS_FALSE@ $(LIBDIR)/libcurl.la
@USE_EXPLICIT_LIB_DEPS_TRUE@persistent_DEPENDENCIES = \
@USE_EXPLICIT_LIB_DEPS_TRUE@ $(LIBDIR)/libcurl.la
pop3_authzid_SOURCES = pop3-authzid.c
pop3_authzid_OBJECTS = pop3-authzid.$(OBJEXT)
pop3_authzid_LDADD = $(LDADD)
@USE_EXPLICIT_LIB_DEPS_FALSE@pop3_authzid_DEPENDENCIES = \
@USE_EXPLICIT_LIB_DEPS_FALSE@ $(LIBDIR)/libcurl.la
@USE_EXPLICIT_LIB_DEPS_TRUE@pop3_authzid_DEPENDENCIES = \
@USE_EXPLICIT_LIB_DEPS_TRUE@ $(LIBDIR)/libcurl.la
pop3_dele_SOURCES = pop3-dele.c
pop3_dele_OBJECTS = pop3-dele.$(OBJEXT)
pop3_dele_LDADD = $(LDADD)
@ -716,6 +751,13 @@ simplessl_LDADD = $(LDADD)
@USE_EXPLICIT_LIB_DEPS_FALSE@ $(LIBDIR)/libcurl.la
@USE_EXPLICIT_LIB_DEPS_TRUE@simplessl_DEPENDENCIES = \
@USE_EXPLICIT_LIB_DEPS_TRUE@ $(LIBDIR)/libcurl.la
smtp_authzid_SOURCES = smtp-authzid.c
smtp_authzid_OBJECTS = smtp-authzid.$(OBJEXT)
smtp_authzid_LDADD = $(LDADD)
@USE_EXPLICIT_LIB_DEPS_FALSE@smtp_authzid_DEPENDENCIES = \
@USE_EXPLICIT_LIB_DEPS_FALSE@ $(LIBDIR)/libcurl.la
@USE_EXPLICIT_LIB_DEPS_TRUE@smtp_authzid_DEPENDENCIES = \
@USE_EXPLICIT_LIB_DEPS_TRUE@ $(LIBDIR)/libcurl.la
smtp_expn_SOURCES = smtp-expn.c
smtp_expn_OBJECTS = smtp-expn.$(OBJEXT)
smtp_expn_LDADD = $(LDADD)
@ -802,32 +844,34 @@ DEFAULT_INCLUDES =
depcomp = $(SHELL) $(top_srcdir)/depcomp
am__maybe_remake_depfiles = depfiles
am__depfiles_remade = ./$(DEPDIR)/10-at-a-time.Po \
./$(DEPDIR)/anyauthput.Po ./$(DEPDIR)/certinfo.Po \
./$(DEPDIR)/chkspeed.Po ./$(DEPDIR)/cookie_interface.Po \
./$(DEPDIR)/debug.Po ./$(DEPDIR)/externalsocket.Po \
./$(DEPDIR)/fileupload.Po ./$(DEPDIR)/fopen.Po \
./$(DEPDIR)/ftp-wildcard.Po ./$(DEPDIR)/ftpget.Po \
./$(DEPDIR)/ftpgetinfo.Po ./$(DEPDIR)/ftpgetresp.Po \
./$(DEPDIR)/ftpsget.Po ./$(DEPDIR)/ftpupload.Po \
./$(DEPDIR)/ftpuploadfrommem.Po ./$(DEPDIR)/ftpuploadresume.Po \
./$(DEPDIR)/getinfo.Po ./$(DEPDIR)/getinmemory.Po \
./$(DEPDIR)/getredirect.Po ./$(DEPDIR)/http-post.Po \
./$(DEPDIR)/http2-download.Po \
./$(DEPDIR)/altsvc.Po ./$(DEPDIR)/anyauthput.Po \
./$(DEPDIR)/certinfo.Po ./$(DEPDIR)/chkspeed.Po \
./$(DEPDIR)/cookie_interface.Po ./$(DEPDIR)/debug.Po \
./$(DEPDIR)/externalsocket.Po ./$(DEPDIR)/fileupload.Po \
./$(DEPDIR)/fopen.Po ./$(DEPDIR)/ftp-wildcard.Po \
./$(DEPDIR)/ftpget.Po ./$(DEPDIR)/ftpgetinfo.Po \
./$(DEPDIR)/ftpgetresp.Po ./$(DEPDIR)/ftpsget.Po \
./$(DEPDIR)/ftpupload.Po ./$(DEPDIR)/ftpuploadfrommem.Po \
./$(DEPDIR)/ftpuploadresume.Po ./$(DEPDIR)/getinfo.Po \
./$(DEPDIR)/getinmemory.Po ./$(DEPDIR)/getredirect.Po \
./$(DEPDIR)/http-post.Po ./$(DEPDIR)/http2-download.Po \
./$(DEPDIR)/http2-pushinmemory.Po \
./$(DEPDIR)/http2-serverpush.Po ./$(DEPDIR)/http2-upload.Po \
./$(DEPDIR)/http3-present.Po ./$(DEPDIR)/http3.Po \
./$(DEPDIR)/httpcustomheader.Po ./$(DEPDIR)/httpput.Po \
./$(DEPDIR)/https.Po ./$(DEPDIR)/imap-append.Po \
./$(DEPDIR)/imap-copy.Po ./$(DEPDIR)/imap-create.Po \
./$(DEPDIR)/imap-delete.Po ./$(DEPDIR)/imap-examine.Po \
./$(DEPDIR)/imap-fetch.Po ./$(DEPDIR)/imap-list.Po \
./$(DEPDIR)/imap-lsub.Po ./$(DEPDIR)/imap-multi.Po \
./$(DEPDIR)/imap-noop.Po ./$(DEPDIR)/imap-search.Po \
./$(DEPDIR)/imap-ssl.Po ./$(DEPDIR)/imap-store.Po \
./$(DEPDIR)/imap-tls.Po ./$(DEPDIR)/multi-app.Po \
./$(DEPDIR)/multi-debugcallback.Po ./$(DEPDIR)/multi-double.Po \
./$(DEPDIR)/multi-formadd.Po ./$(DEPDIR)/multi-post.Po \
./$(DEPDIR)/multi-single.Po ./$(DEPDIR)/parseurl.Po \
./$(DEPDIR)/persistent.Po ./$(DEPDIR)/pop3-dele.Po \
./$(DEPDIR)/imap-authzid.Po ./$(DEPDIR)/imap-copy.Po \
./$(DEPDIR)/imap-create.Po ./$(DEPDIR)/imap-delete.Po \
./$(DEPDIR)/imap-examine.Po ./$(DEPDIR)/imap-fetch.Po \
./$(DEPDIR)/imap-list.Po ./$(DEPDIR)/imap-lsub.Po \
./$(DEPDIR)/imap-multi.Po ./$(DEPDIR)/imap-noop.Po \
./$(DEPDIR)/imap-search.Po ./$(DEPDIR)/imap-ssl.Po \
./$(DEPDIR)/imap-store.Po ./$(DEPDIR)/imap-tls.Po \
./$(DEPDIR)/multi-app.Po ./$(DEPDIR)/multi-debugcallback.Po \
./$(DEPDIR)/multi-double.Po ./$(DEPDIR)/multi-formadd.Po \
./$(DEPDIR)/multi-post.Po ./$(DEPDIR)/multi-single.Po \
./$(DEPDIR)/parseurl.Po ./$(DEPDIR)/persistent.Po \
./$(DEPDIR)/pop3-authzid.Po ./$(DEPDIR)/pop3-dele.Po \
./$(DEPDIR)/pop3-list.Po ./$(DEPDIR)/pop3-multi.Po \
./$(DEPDIR)/pop3-noop.Po ./$(DEPDIR)/pop3-retr.Po \
./$(DEPDIR)/pop3-ssl.Po ./$(DEPDIR)/pop3-stat.Po \
@ -840,11 +884,12 @@ am__depfiles_remade = ./$(DEPDIR)/10-at-a-time.Po \
./$(DEPDIR)/sftpget.Po ./$(DEPDIR)/sftpuploadresume.Po \
./$(DEPDIR)/shared-connection-cache.Po ./$(DEPDIR)/simple.Po \
./$(DEPDIR)/simplepost.Po ./$(DEPDIR)/simplessl.Po \
./$(DEPDIR)/smtp-expn.Po ./$(DEPDIR)/smtp-mail.Po \
./$(DEPDIR)/smtp-mime.Po ./$(DEPDIR)/smtp-multi.Po \
./$(DEPDIR)/smtp-ssl.Po ./$(DEPDIR)/smtp-tls.Po \
./$(DEPDIR)/smtp-vrfy.Po ./$(DEPDIR)/sslbackend.Po \
./$(DEPDIR)/url2file.Po ./$(DEPDIR)/urlapi.Po
./$(DEPDIR)/smtp-authzid.Po ./$(DEPDIR)/smtp-expn.Po \
./$(DEPDIR)/smtp-mail.Po ./$(DEPDIR)/smtp-mime.Po \
./$(DEPDIR)/smtp-multi.Po ./$(DEPDIR)/smtp-ssl.Po \
./$(DEPDIR)/smtp-tls.Po ./$(DEPDIR)/smtp-vrfy.Po \
./$(DEPDIR)/sslbackend.Po ./$(DEPDIR)/url2file.Po \
./$(DEPDIR)/urlapi.Po
am__mv = mv -f
COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \
$(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
@ -864,46 +909,50 @@ AM_V_CCLD = $(am__v_CCLD_@AM_V@)
am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@)
am__v_CCLD_0 = @echo " CCLD " $@;
am__v_CCLD_1 =
SOURCES = 10-at-a-time.c anyauthput.c certinfo.c chkspeed.c \
SOURCES = 10-at-a-time.c altsvc.c anyauthput.c certinfo.c chkspeed.c \
cookie_interface.c debug.c externalsocket.c fileupload.c \
fopen.c ftp-wildcard.c ftpget.c ftpgetinfo.c ftpgetresp.c \
ftpsget.c ftpupload.c ftpuploadfrommem.c ftpuploadresume.c \
getinfo.c getinmemory.c getredirect.c http-post.c \
http2-download.c http2-pushinmemory.c http2-serverpush.c \
http2-upload.c httpcustomheader.c httpput.c https.c \
imap-append.c imap-copy.c imap-create.c imap-delete.c \
http2-upload.c http3.c http3-present.c httpcustomheader.c \
httpput.c https.c imap-append.c imap-authzid.c imap-copy.c \
imap-create.c imap-delete.c imap-examine.c imap-fetch.c \
imap-list.c imap-lsub.c imap-multi.c imap-noop.c imap-search.c \
imap-ssl.c imap-store.c imap-tls.c multi-app.c \
multi-debugcallback.c multi-double.c multi-formadd.c \
multi-post.c multi-single.c parseurl.c persistent.c \
pop3-authzid.c pop3-dele.c pop3-list.c pop3-multi.c \
pop3-noop.c pop3-retr.c pop3-ssl.c pop3-stat.c pop3-tls.c \
pop3-top.c pop3-uidl.c post-callback.c postinmemory.c \
postit2.c postit2-formadd.c progressfunc.c resolve.c rtsp.c \
sendrecv.c sepheaders.c sftpget.c sftpuploadresume.c \
shared-connection-cache.c simple.c simplepost.c simplessl.c \
smtp-authzid.c smtp-expn.c smtp-mail.c smtp-mime.c \
smtp-multi.c smtp-ssl.c smtp-tls.c smtp-vrfy.c sslbackend.c \
url2file.c urlapi.c
DIST_SOURCES = 10-at-a-time.c altsvc.c anyauthput.c certinfo.c \
chkspeed.c cookie_interface.c debug.c externalsocket.c \
fileupload.c fopen.c ftp-wildcard.c ftpget.c ftpgetinfo.c \
ftpgetresp.c ftpsget.c ftpupload.c ftpuploadfrommem.c \
ftpuploadresume.c getinfo.c getinmemory.c getredirect.c \
http-post.c http2-download.c http2-pushinmemory.c \
http2-serverpush.c http2-upload.c http3.c http3-present.c \
httpcustomheader.c httpput.c https.c imap-append.c \
imap-authzid.c imap-copy.c imap-create.c imap-delete.c \
imap-examine.c imap-fetch.c imap-list.c imap-lsub.c \
imap-multi.c imap-noop.c imap-search.c imap-ssl.c imap-store.c \
imap-tls.c multi-app.c multi-debugcallback.c multi-double.c \
multi-formadd.c multi-post.c multi-single.c parseurl.c \
persistent.c pop3-dele.c pop3-list.c pop3-multi.c pop3-noop.c \
pop3-retr.c pop3-ssl.c pop3-stat.c pop3-tls.c pop3-top.c \
pop3-uidl.c post-callback.c postinmemory.c postit2.c \
postit2-formadd.c progressfunc.c resolve.c rtsp.c sendrecv.c \
sepheaders.c sftpget.c sftpuploadresume.c \
shared-connection-cache.c simple.c simplepost.c simplessl.c \
smtp-expn.c smtp-mail.c smtp-mime.c smtp-multi.c smtp-ssl.c \
smtp-tls.c smtp-vrfy.c sslbackend.c url2file.c urlapi.c
DIST_SOURCES = 10-at-a-time.c anyauthput.c certinfo.c chkspeed.c \
cookie_interface.c debug.c externalsocket.c fileupload.c \
fopen.c ftp-wildcard.c ftpget.c ftpgetinfo.c ftpgetresp.c \
ftpsget.c ftpupload.c ftpuploadfrommem.c ftpuploadresume.c \
getinfo.c getinmemory.c getredirect.c http-post.c \
http2-download.c http2-pushinmemory.c http2-serverpush.c \
http2-upload.c httpcustomheader.c httpput.c https.c \
imap-append.c imap-copy.c imap-create.c imap-delete.c \
imap-examine.c imap-fetch.c imap-list.c imap-lsub.c \
imap-multi.c imap-noop.c imap-search.c imap-ssl.c imap-store.c \
imap-tls.c multi-app.c multi-debugcallback.c multi-double.c \
multi-formadd.c multi-post.c multi-single.c parseurl.c \
persistent.c pop3-dele.c pop3-list.c pop3-multi.c pop3-noop.c \
pop3-retr.c pop3-ssl.c pop3-stat.c pop3-tls.c pop3-top.c \
pop3-uidl.c post-callback.c postinmemory.c postit2.c \
postit2-formadd.c progressfunc.c resolve.c rtsp.c sendrecv.c \
sepheaders.c sftpget.c sftpuploadresume.c \
shared-connection-cache.c simple.c simplepost.c simplessl.c \
smtp-expn.c smtp-mail.c smtp-mime.c smtp-multi.c smtp-ssl.c \
smtp-tls.c smtp-vrfy.c sslbackend.c url2file.c urlapi.c
persistent.c pop3-authzid.c pop3-dele.c pop3-list.c \
pop3-multi.c pop3-noop.c pop3-retr.c pop3-ssl.c pop3-stat.c \
pop3-tls.c pop3-top.c pop3-uidl.c post-callback.c \
postinmemory.c postit2.c postit2-formadd.c progressfunc.c \
resolve.c rtsp.c sendrecv.c sepheaders.c sftpget.c \
sftpuploadresume.c shared-connection-cache.c simple.c \
simplepost.c simplessl.c smtp-authzid.c smtp-expn.c \
smtp-mail.c smtp-mime.c smtp-multi.c smtp-ssl.c smtp-tls.c \
smtp-vrfy.c sslbackend.c url2file.c urlapi.c
am__can_run_installinfo = \
case $$AM_UPDATE_INFO_DIR in \
n|no|NO) false;; \
@ -948,12 +997,6 @@ CCDEPMODE = @CCDEPMODE@
# This might hold -Werror
CFLAGS = @CFLAGS@ @CURL_CFLAG_EXTRAS@
CFLAG_CURL_SYMBOL_HIDING = @CFLAG_CURL_SYMBOL_HIDING@
CODE_COVERAGE_CFLAGS = @CODE_COVERAGE_CFLAGS@
CODE_COVERAGE_CPPFLAGS = @CODE_COVERAGE_CPPFLAGS@
CODE_COVERAGE_CXXFLAGS = @CODE_COVERAGE_CXXFLAGS@
CODE_COVERAGE_ENABLED = @CODE_COVERAGE_ENABLED@
CODE_COVERAGE_LDFLAGS = @CODE_COVERAGE_LDFLAGS@
CODE_COVERAGE_LIBS = @CODE_COVERAGE_LIBS@
CONFIGURE_OPTIONS = @CONFIGURE_OPTIONS@
CPP = @CPP@
CPPFLAGS = @CPPFLAGS@
@ -995,14 +1038,15 @@ ENABLE_SHARED = @ENABLE_SHARED@
ENABLE_STATIC = @ENABLE_STATIC@
EXEEXT = @EXEEXT@
FGREP = @FGREP@
FISH_FUNCTIONS_DIR = @FISH_FUNCTIONS_DIR@
GCOV = @GCOV@
GENHTML = @GENHTML@
GREP = @GREP@
HAVE_BROTLI = @HAVE_BROTLI@
HAVE_GNUTLS_SRP = @HAVE_GNUTLS_SRP@
HAVE_LDAP_SSL = @HAVE_LDAP_SSL@
HAVE_LIBZ = @HAVE_LIBZ@
HAVE_OPENSSL_SRP = @HAVE_OPENSSL_SRP@
HAVE_PROTO_BSDSOCKET_H = @HAVE_PROTO_BSDSOCKET_H@
IDN_ENABLED = @IDN_ENABLED@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
@ -1065,8 +1109,6 @@ STRIP = @STRIP@
SUPPORT_FEATURES = @SUPPORT_FEATURES@
SUPPORT_PROTOCOLS = @SUPPORT_PROTOCOLS@
USE_ARES = @USE_ARES@
USE_CYASSL = @USE_CYASSL@
USE_DARWINSSL = @USE_DARWINSSL@
USE_GNUTLS = @USE_GNUTLS@
USE_GNUTLS_NETTLE = @USE_GNUTLS_NETTLE@
USE_LIBRTMP = @USE_LIBRTMP@
@ -1075,12 +1117,17 @@ USE_LIBSSH2 = @USE_LIBSSH2@
USE_MBEDTLS = @USE_MBEDTLS@
USE_MESALINK = @USE_MESALINK@
USE_NGHTTP2 = @USE_NGHTTP2@
USE_NGHTTP3 = @USE_NGHTTP3@
USE_NGTCP2 = @USE_NGTCP2@
USE_NGTCP2_CRYPTO_OPENSSL = @USE_NGTCP2_CRYPTO_OPENSSL@
USE_NSS = @USE_NSS@
USE_OPENLDAP = @USE_OPENLDAP@
USE_POLARSSL = @USE_POLARSSL@
USE_QUICHE = @USE_QUICHE@
USE_SCHANNEL = @USE_SCHANNEL@
USE_SECTRANSP = @USE_SECTRANSP@
USE_UNIX_SOCKETS = @USE_UNIX_SOCKETS@
USE_WINDOWS_SSPI = @USE_WINDOWS_SSPI@
USE_WOLFSSL = @USE_WOLFSSL@
VERSION = @VERSION@
VERSIONNUM = @VERSIONNUM@
ZLIB_LIBS = @ZLIB_LIBS@
@ -1167,8 +1214,12 @@ COMPLICATED_EXAMPLES = curlgtk.c curlx.c htmltitle.cpp cacertinmem.c \
sampleconv.c synctime.c threaded-ssl.c evhiperfifo.c \
smooth-gtk-thread.c version-check.pl href_extractor.c asiohiper.cpp \
multi-uv.c xmlstream.c usercertinmem.c sessioninfo.c \
threaded-shared-conn.c crawler.c ephiperfifo.c
threaded-shared-conn.c crawler.c ephiperfifo.c multi-event.c
CHECKSRC = $(CS_$(V))
CS_0 = @echo " RUN " $@;
CS_1 =
CS_ = $(CS_0)
all: all-am
.SUFFIXES:
@ -1217,6 +1268,10 @@ clean-checkPROGRAMS:
@rm -f 10-at-a-time$(EXEEXT)
$(AM_V_CCLD)$(LINK) $(10_at_a_time_OBJECTS) $(10_at_a_time_LDADD) $(LIBS)
altsvc$(EXEEXT): $(altsvc_OBJECTS) $(altsvc_DEPENDENCIES) $(EXTRA_altsvc_DEPENDENCIES)
@rm -f altsvc$(EXEEXT)
$(AM_V_CCLD)$(LINK) $(altsvc_OBJECTS) $(altsvc_LDADD) $(LIBS)
anyauthput$(EXEEXT): $(anyauthput_OBJECTS) $(anyauthput_DEPENDENCIES) $(EXTRA_anyauthput_DEPENDENCIES)
@rm -f anyauthput$(EXEEXT)
$(AM_V_CCLD)$(LINK) $(anyauthput_OBJECTS) $(anyauthput_LDADD) $(LIBS)
@ -1313,6 +1368,14 @@ http2-upload$(EXEEXT): $(http2_upload_OBJECTS) $(http2_upload_DEPENDENCIES) $(EX
@rm -f http2-upload$(EXEEXT)
$(AM_V_CCLD)$(LINK) $(http2_upload_OBJECTS) $(http2_upload_LDADD) $(LIBS)
http3$(EXEEXT): $(http3_OBJECTS) $(http3_DEPENDENCIES) $(EXTRA_http3_DEPENDENCIES)
@rm -f http3$(EXEEXT)
$(AM_V_CCLD)$(LINK) $(http3_OBJECTS) $(http3_LDADD) $(LIBS)
http3-present$(EXEEXT): $(http3_present_OBJECTS) $(http3_present_DEPENDENCIES) $(EXTRA_http3_present_DEPENDENCIES)
@rm -f http3-present$(EXEEXT)
$(AM_V_CCLD)$(LINK) $(http3_present_OBJECTS) $(http3_present_LDADD) $(LIBS)
httpcustomheader$(EXEEXT): $(httpcustomheader_OBJECTS) $(httpcustomheader_DEPENDENCIES) $(EXTRA_httpcustomheader_DEPENDENCIES)
@rm -f httpcustomheader$(EXEEXT)
$(AM_V_CCLD)$(LINK) $(httpcustomheader_OBJECTS) $(httpcustomheader_LDADD) $(LIBS)
@ -1329,6 +1392,10 @@ imap-append$(EXEEXT): $(imap_append_OBJECTS) $(imap_append_DEPENDENCIES) $(EXTRA
@rm -f imap-append$(EXEEXT)
$(AM_V_CCLD)$(LINK) $(imap_append_OBJECTS) $(imap_append_LDADD) $(LIBS)
imap-authzid$(EXEEXT): $(imap_authzid_OBJECTS) $(imap_authzid_DEPENDENCIES) $(EXTRA_imap_authzid_DEPENDENCIES)
@rm -f imap-authzid$(EXEEXT)
$(AM_V_CCLD)$(LINK) $(imap_authzid_OBJECTS) $(imap_authzid_LDADD) $(LIBS)
imap-copy$(EXEEXT): $(imap_copy_OBJECTS) $(imap_copy_DEPENDENCIES) $(EXTRA_imap_copy_DEPENDENCIES)
@rm -f imap-copy$(EXEEXT)
$(AM_V_CCLD)$(LINK) $(imap_copy_OBJECTS) $(imap_copy_LDADD) $(LIBS)
@ -1413,6 +1480,10 @@ persistent$(EXEEXT): $(persistent_OBJECTS) $(persistent_DEPENDENCIES) $(EXTRA_pe
@rm -f persistent$(EXEEXT)
$(AM_V_CCLD)$(LINK) $(persistent_OBJECTS) $(persistent_LDADD) $(LIBS)
pop3-authzid$(EXEEXT): $(pop3_authzid_OBJECTS) $(pop3_authzid_DEPENDENCIES) $(EXTRA_pop3_authzid_DEPENDENCIES)
@rm -f pop3-authzid$(EXEEXT)
$(AM_V_CCLD)$(LINK) $(pop3_authzid_OBJECTS) $(pop3_authzid_LDADD) $(LIBS)
pop3-dele$(EXEEXT): $(pop3_dele_OBJECTS) $(pop3_dele_DEPENDENCIES) $(EXTRA_pop3_dele_DEPENDENCIES)
@rm -f pop3-dele$(EXEEXT)
$(AM_V_CCLD)$(LINK) $(pop3_dele_OBJECTS) $(pop3_dele_LDADD) $(LIBS)
@ -1513,6 +1584,10 @@ simplessl$(EXEEXT): $(simplessl_OBJECTS) $(simplessl_DEPENDENCIES) $(EXTRA_simpl
@rm -f simplessl$(EXEEXT)
$(AM_V_CCLD)$(LINK) $(simplessl_OBJECTS) $(simplessl_LDADD) $(LIBS)
smtp-authzid$(EXEEXT): $(smtp_authzid_OBJECTS) $(smtp_authzid_DEPENDENCIES) $(EXTRA_smtp_authzid_DEPENDENCIES)
@rm -f smtp-authzid$(EXEEXT)
$(AM_V_CCLD)$(LINK) $(smtp_authzid_OBJECTS) $(smtp_authzid_LDADD) $(LIBS)
smtp-expn$(EXEEXT): $(smtp_expn_OBJECTS) $(smtp_expn_DEPENDENCIES) $(EXTRA_smtp_expn_DEPENDENCIES)
@rm -f smtp-expn$(EXEEXT)
$(AM_V_CCLD)$(LINK) $(smtp_expn_OBJECTS) $(smtp_expn_LDADD) $(LIBS)
@ -1560,6 +1635,7 @@ distclean-compile:
-rm -f *.tab.c
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/10-at-a-time.Po@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/altsvc.Po@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/anyauthput.Po@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/certinfo.Po@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/chkspeed.Po@am__quote@ # am--include-marker
@ -1584,10 +1660,13 @@ distclean-compile:
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/http2-pushinmemory.Po@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/http2-serverpush.Po@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/http2-upload.Po@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/http3-present.Po@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/http3.Po@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/httpcustomheader.Po@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/httpput.Po@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/https.Po@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/imap-append.Po@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/imap-authzid.Po@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/imap-copy.Po@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/imap-create.Po@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/imap-delete.Po@am__quote@ # am--include-marker
@ -1609,6 +1688,7 @@ distclean-compile:
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/multi-single.Po@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/parseurl.Po@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/persistent.Po@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/pop3-authzid.Po@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/pop3-dele.Po@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/pop3-list.Po@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/pop3-multi.Po@am__quote@ # am--include-marker
@ -1634,6 +1714,7 @@ distclean-compile:
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/simple.Po@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/simplepost.Po@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/simplessl.Po@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/smtp-authzid.Po@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/smtp-expn.Po@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/smtp-mail.Po@am__quote@ # am--include-marker
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/smtp-mime.Po@am__quote@ # am--include-marker
@ -1808,6 +1889,7 @@ clean-am: clean-checkPROGRAMS clean-generic clean-libtool \
distclean: distclean-am
-rm -f ./$(DEPDIR)/10-at-a-time.Po
-rm -f ./$(DEPDIR)/altsvc.Po
-rm -f ./$(DEPDIR)/anyauthput.Po
-rm -f ./$(DEPDIR)/certinfo.Po
-rm -f ./$(DEPDIR)/chkspeed.Po
@ -1832,10 +1914,13 @@ distclean: distclean-am
-rm -f ./$(DEPDIR)/http2-pushinmemory.Po
-rm -f ./$(DEPDIR)/http2-serverpush.Po
-rm -f ./$(DEPDIR)/http2-upload.Po
-rm -f ./$(DEPDIR)/http3-present.Po
-rm -f ./$(DEPDIR)/http3.Po
-rm -f ./$(DEPDIR)/httpcustomheader.Po
-rm -f ./$(DEPDIR)/httpput.Po
-rm -f ./$(DEPDIR)/https.Po
-rm -f ./$(DEPDIR)/imap-append.Po
-rm -f ./$(DEPDIR)/imap-authzid.Po
-rm -f ./$(DEPDIR)/imap-copy.Po
-rm -f ./$(DEPDIR)/imap-create.Po
-rm -f ./$(DEPDIR)/imap-delete.Po
@ -1857,6 +1942,7 @@ distclean: distclean-am
-rm -f ./$(DEPDIR)/multi-single.Po
-rm -f ./$(DEPDIR)/parseurl.Po
-rm -f ./$(DEPDIR)/persistent.Po
-rm -f ./$(DEPDIR)/pop3-authzid.Po
-rm -f ./$(DEPDIR)/pop3-dele.Po
-rm -f ./$(DEPDIR)/pop3-list.Po
-rm -f ./$(DEPDIR)/pop3-multi.Po
@ -1882,6 +1968,7 @@ distclean: distclean-am
-rm -f ./$(DEPDIR)/simple.Po
-rm -f ./$(DEPDIR)/simplepost.Po
-rm -f ./$(DEPDIR)/simplessl.Po
-rm -f ./$(DEPDIR)/smtp-authzid.Po
-rm -f ./$(DEPDIR)/smtp-expn.Po
-rm -f ./$(DEPDIR)/smtp-mail.Po
-rm -f ./$(DEPDIR)/smtp-mime.Po
@ -1938,6 +2025,7 @@ installcheck-am:
maintainer-clean: maintainer-clean-am
-rm -f ./$(DEPDIR)/10-at-a-time.Po
-rm -f ./$(DEPDIR)/altsvc.Po
-rm -f ./$(DEPDIR)/anyauthput.Po
-rm -f ./$(DEPDIR)/certinfo.Po
-rm -f ./$(DEPDIR)/chkspeed.Po
@ -1962,10 +2050,13 @@ maintainer-clean: maintainer-clean-am
-rm -f ./$(DEPDIR)/http2-pushinmemory.Po
-rm -f ./$(DEPDIR)/http2-serverpush.Po
-rm -f ./$(DEPDIR)/http2-upload.Po
-rm -f ./$(DEPDIR)/http3-present.Po
-rm -f ./$(DEPDIR)/http3.Po
-rm -f ./$(DEPDIR)/httpcustomheader.Po
-rm -f ./$(DEPDIR)/httpput.Po
-rm -f ./$(DEPDIR)/https.Po
-rm -f ./$(DEPDIR)/imap-append.Po
-rm -f ./$(DEPDIR)/imap-authzid.Po
-rm -f ./$(DEPDIR)/imap-copy.Po
-rm -f ./$(DEPDIR)/imap-create.Po
-rm -f ./$(DEPDIR)/imap-delete.Po
@ -1987,6 +2078,7 @@ maintainer-clean: maintainer-clean-am
-rm -f ./$(DEPDIR)/multi-single.Po
-rm -f ./$(DEPDIR)/parseurl.Po
-rm -f ./$(DEPDIR)/persistent.Po
-rm -f ./$(DEPDIR)/pop3-authzid.Po
-rm -f ./$(DEPDIR)/pop3-dele.Po
-rm -f ./$(DEPDIR)/pop3-list.Po
-rm -f ./$(DEPDIR)/pop3-multi.Po
@ -2012,6 +2104,7 @@ maintainer-clean: maintainer-clean-am
-rm -f ./$(DEPDIR)/simple.Po
-rm -f ./$(DEPDIR)/simplepost.Po
-rm -f ./$(DEPDIR)/simplessl.Po
-rm -f ./$(DEPDIR)/smtp-authzid.Po
-rm -f ./$(DEPDIR)/smtp-expn.Po
-rm -f ./$(DEPDIR)/smtp-mail.Po
-rm -f ./$(DEPDIR)/smtp-mime.Po
@ -2064,7 +2157,7 @@ uninstall-am:
all: $(check_PROGRAMS)
checksrc:
@PERL@ $(top_srcdir)/lib/checksrc.pl -ASNPRINTF $(srcdir)/*.c
$(CHECKSRC)(@PERL@ $(top_srcdir)/lib/checksrc.pl -ASNPRINTF $(srcdir)/*.c)
# Tell versions [3.59,3.63) of GNU make to not export all variables.
# Otherwise a system limit (for SysV at least) may be exceeded.

View file

@ -5,7 +5,7 @@
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) 1998 - 2018, Daniel Stenberg, <daniel@haxx.se>, et al.
# Copyright (C) 1998 - 2019, Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
@ -35,7 +35,8 @@ check_PROGRAMS = 10-at-a-time anyauthput cookie_interface debug fileupload \
http2-upload http2-serverpush getredirect ftpuploadfrommem \
ftpuploadresume sslbackend postit2-formadd multi-formadd \
shared-connection-cache sftpuploadresume http2-pushinmemory parseurl \
urlapi
urlapi imap-authzid pop3-authzid smtp-authzid http3 altsvc \
http3-present
# These examples require external dependencies that may not be commonly
# available on POSIX systems, so don't bother attempting to compile them here.
@ -44,4 +45,4 @@ COMPLICATED_EXAMPLES = curlgtk.c curlx.c htmltitle.cpp cacertinmem.c \
sampleconv.c synctime.c threaded-ssl.c evhiperfifo.c \
smooth-gtk-thread.c version-check.pl href_extractor.c asiohiper.cpp \
multi-uv.c xmlstream.c usercertinmem.c sessioninfo.c \
threaded-shared-conn.c crawler.c ephiperfifo.c
threaded-shared-conn.c crawler.c ephiperfifo.c multi-event.c

View file

@ -0,0 +1,56 @@
/***************************************************************************
* _ _ ____ _
* Project ___| | | | _ \| |
* / __| | | | |_) | |
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2019, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
* are also available at https://curl.haxx.se/docs/copyright.html.
*
* You may opt to use, copy, modify, merge, publish, distribute and/or sell
* copies of the Software, and permit persons to whom the Software is
* furnished to do so, under the terms of the COPYING file.
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
* KIND, either express or implied.
*
***************************************************************************/
/* <DESC>
* HTTP with Alt-Svc support
* </DESC>
*/
#include <stdio.h>
#include <curl/curl.h>
int main(void)
{
CURL *curl;
CURLcode res;
curl = curl_easy_init();
if(curl) {
curl_easy_setopt(curl, CURLOPT_URL, "https://example.com");
/* cache the alternatives in this file */
curl_easy_setopt(curl, CURLOPT_ALTSVC, "altsvc.txt");
/* restrict which HTTP versions to use alternatives */
curl_easy_setopt(curl, CURLOPT_ALTSVC_CTRL, (long)
CURLALTSVC_H1|CURLALTSVC_H2|CURLALTSVC_H3);
/* Perform the request, res will get the return code */
res = curl_easy_perform(curl);
/* Check for errors */
if(res != CURLE_OK)
fprintf(stderr, "curl_easy_perform() failed: %s\n",
curl_easy_strerror(res));
/* always cleanup */
curl_easy_cleanup(curl);
}
return 0;
}

View file

@ -5,7 +5,7 @@
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2018, Daniel Stenberg, <daniel@haxx.se>, et al.
* Copyright (C) 1998 - 2019, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
@ -29,7 +29,7 @@
#include <curl/curl.h>
#include <stdio.h>
size_t writefunction(void *ptr, size_t size, size_t nmemb, void *stream)
static size_t writefunction(void *ptr, size_t size, size_t nmemb, void *stream)
{
fwrite(ptr, size, nmemb, (FILE *)stream);
return (nmemb*size);
@ -38,88 +38,83 @@ size_t writefunction(void *ptr, size_t size, size_t nmemb, void *stream)
static CURLcode sslctx_function(CURL *curl, void *sslctx, void *parm)
{
CURLcode rv = CURLE_ABORTED_BY_CALLBACK;
X509_STORE *store = NULL;
X509 *cert = NULL;
BIO *bio = NULL;
char *mypem =
/* CA for example.com. CN = DigiCert High Assurance EV Root CA */
/** This example uses two (fake) certificates **/
static const char mypem[] =
"-----BEGIN CERTIFICATE-----\n"
"MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs\n"
"MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3\n"
"d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j\n"
"ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL\n"
"MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3\n"
"LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug\n"
"RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm\n"
"+9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW\n"
"PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM\n"
"xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB\n"
"Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3\n"
"hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg\n"
"EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF\n"
"MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA\n"
"FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec\n"
"nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z\n"
"eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF\n"
"hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2\n"
"Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe\n"
"vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep\n"
"+OkuE6N36B9K\n"
"MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE\n"
"AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw\n"
"CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ\n"
"BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND\n"
"VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb\n"
"qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY\n"
"HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo\n"
"G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA\n"
"0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH\n"
"k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47\n"
"JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m\n"
"AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD\n"
"vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms\n"
"tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH\n"
"7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h\n"
"I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA\n"
"h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF\n"
"d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H\n"
"pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7\n"
"-----END CERTIFICATE-----\n"
"-----BEGIN CERTIFICATE-----\n"
"MIIFtTCCA52gAwIBAgIIYY3HhjsBggUwDQYJKoZIhvcNAQEFBQAwRDEWMBQGA1UE\n"
"AwwNQUNFRElDT00gUm9vdDEMMAoGA1UECwwDUEtJMQ8wDQYDVQQKDAZFRElDT00x\n"
"CzAJBgNVBAYTAkVTMB4XDTA4MDQxODE2MjQyMloXDTI4MDQxMzE2MjQyMlowRDEW\n"
"MBQGA1UEAwwNQUNFRElDT00gUm9vdDEMMAoGA1UECwwDUEtJMQ8wDQYDVQQKDAZF\n"
"RElDT00xCzAJBgNVBAYTAkVTMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKC\n"
"AgEA/5KV4WgGdrQsyFhIyv2AVClVYyT/kGWbEHV7w2rbYgIB8hiGtXxaOLHkWLn7\n"
"09gtn70yN78sFW2+tfQh0hOR2QetAQXW8713zl9CgQr5auODAKgrLlUTY4HKRxx7\n"
"XBZXehuDYAQ6PmXDzQHe3qTWDLqO3tkE7hdWIpuPY/1NFgu3e3eM+SW10W2ZEi5P\n"
"gvoFNTPhNahXwOf9jU8/kzJPeGYDdwdY6ZXIfj7QeQCM8htRM5u8lOk6e25SLTKe\n"
"I6RF+7YuE7CLGLHdztUdp0J/Vb77W7tH1PwkzQSulgUV1qzOMPPKC8W64iLgpq0i\n"
"5ALudBF/TP94HTXa5gI06xgSYXcGCRZj6hitoocf8seACQl1ThCojz2GuHURwCRi\n"
"ipZ7SkXp7FnFvmuD5uHorLUwHv4FB4D54SMNUI8FmP8sX+g7tq3PgbUhh8oIKiMn\n"
"MCArz+2UW6yyetLHKKGKC5tNSixthT8Jcjxn4tncB7rrZXtaAWPWkFtPF2Y9fwsZ\n"
"o5NjEFIqnxQWWOLcpfShFosOkYuByptZ+thrkQdlVV9SH686+5DdaaVbnG0OLLb6\n"
"zqylfDJKZ0DcMDQj3dcEI2bw/FWAp/tmGYI1Z2JwOV5vx+qQQEQIHriy1tvuWacN\n"
"GHk0vFQYXlPKNFHtRQrmjseCNj6nOGOpMCwXEGCSn1WHElkQwg9naRHMTh5+Spqt\n"
"r0CodaxWkHS4oJyleW/c6RrIaQXpuvoDs3zk4E7Czp3otkYNbn5XOmeUwssfnHdK\n"
"Z05phkOTOPu220+DkdRgfks+KzgHVZhepA==\n"
"-----END CERTIFICATE-----\n";
/* clear the current thread's OpenSSL error queue */
ERR_clear_error();
BIO *cbio = BIO_new_mem_buf(mypem, sizeof(mypem));
X509_STORE *cts = SSL_CTX_get_cert_store((SSL_CTX *)sslctx);
int i;
STACK_OF(X509_INFO) *inf;
(void)curl;
(void)parm;
/* get a BIO */
bio = BIO_new_mem_buf(mypem, -1);
if(!bio)
goto err;
/* use it to read the PEM formatted certificate from memory into an X509
* structure that SSL can use
*/
if(!PEM_read_bio_X509(bio, &cert, 0, NULL))
goto err;
/* get a pointer to the X509 certificate store (which may be empty!) */
store = SSL_CTX_get_cert_store((SSL_CTX *)sslctx);
if(!store)
goto err;
/* add our certificate to this store */
if(!X509_STORE_add_cert(store, cert)) {
unsigned long error = ERR_peek_last_error();
/* Ignore error X509_R_CERT_ALREADY_IN_HASH_TABLE which means the
* certificate is already in the store. That could happen if
* libcurl already loaded the certificate from a ca cert bundle
* set at libcurl build-time or runtime.
*/
if(ERR_GET_LIB(error) != ERR_LIB_X509 ||
ERR_GET_REASON(error) != X509_R_CERT_ALREADY_IN_HASH_TABLE)
goto err;
ERR_clear_error();
if(!cts || !cbio) {
return rv;
}
rv = CURLE_OK;
inf = PEM_X509_INFO_read_bio(cbio, NULL, NULL, NULL);
err:
if(rv != CURLE_OK) {
char errbuf[256];
unsigned long error = ERR_peek_last_error();
if(!inf) {
BIO_free(cbio);
return rv;
}
fprintf(stderr, "error adding certificate\n");
if(error) {
ERR_error_string_n(error, errbuf, sizeof(errbuf));
fprintf(stderr, "%s\n", errbuf);
for(i = 0; i < sk_X509_INFO_num(inf); i++) {
X509_INFO *itmp = sk_X509_INFO_value(inf, i);
if(itmp->x509) {
X509_STORE_add_cert(cts, itmp->x509);
}
if(itmp->crl) {
X509_STORE_add_crl(cts, itmp->crl);
}
}
X509_free(cert);
BIO_free(bio);
ERR_clear_error();
sk_X509_INFO_pop_free(inf, X509_INFO_free);
BIO_free(cbio);
rv = CURLE_OK;
return rv;
}
@ -128,26 +123,26 @@ int main(void)
CURL *ch;
CURLcode rv;
rv = curl_global_init(CURL_GLOBAL_ALL);
curl_global_init(CURL_GLOBAL_ALL);
ch = curl_easy_init();
rv = curl_easy_setopt(ch, CURLOPT_VERBOSE, 0L);
rv = curl_easy_setopt(ch, CURLOPT_HEADER, 0L);
rv = curl_easy_setopt(ch, CURLOPT_NOPROGRESS, 1L);
rv = curl_easy_setopt(ch, CURLOPT_NOSIGNAL, 1L);
rv = curl_easy_setopt(ch, CURLOPT_WRITEFUNCTION, *writefunction);
rv = curl_easy_setopt(ch, CURLOPT_WRITEDATA, stdout);
rv = curl_easy_setopt(ch, CURLOPT_HEADERFUNCTION, *writefunction);
rv = curl_easy_setopt(ch, CURLOPT_HEADERDATA, stderr);
rv = curl_easy_setopt(ch, CURLOPT_SSLCERTTYPE, "PEM");
rv = curl_easy_setopt(ch, CURLOPT_SSL_VERIFYPEER, 1L);
rv = curl_easy_setopt(ch, CURLOPT_URL, "https://www.example.com/");
curl_easy_setopt(ch, CURLOPT_VERBOSE, 0L);
curl_easy_setopt(ch, CURLOPT_HEADER, 0L);
curl_easy_setopt(ch, CURLOPT_NOPROGRESS, 1L);
curl_easy_setopt(ch, CURLOPT_NOSIGNAL, 1L);
curl_easy_setopt(ch, CURLOPT_WRITEFUNCTION, *writefunction);
curl_easy_setopt(ch, CURLOPT_WRITEDATA, stdout);
curl_easy_setopt(ch, CURLOPT_HEADERFUNCTION, *writefunction);
curl_easy_setopt(ch, CURLOPT_HEADERDATA, stderr);
curl_easy_setopt(ch, CURLOPT_SSLCERTTYPE, "PEM");
curl_easy_setopt(ch, CURLOPT_SSL_VERIFYPEER, 1L);
curl_easy_setopt(ch, CURLOPT_URL, "https://www.example.com/");
/* turn off the default CA locations (optional)
* otherwise libcurl will load CA certificates from the locations that
* were detected/specified at build-time
/* Turn off the default CA locations, otherwise libcurl will load CA
* certificates from the locations that were detected/specified at
* build-time
*/
rv = curl_easy_setopt(ch, CURLOPT_CAINFO, NULL);
rv = curl_easy_setopt(ch, CURLOPT_CAPATH, NULL);
curl_easy_setopt(ch, CURLOPT_CAINFO, NULL);
curl_easy_setopt(ch, CURLOPT_CAPATH, NULL);
/* first try: retrieve page without ca certificates -> should fail
* unless libcurl was built --with-ca-fallback enabled at build-time
@@ -167,13 +162,13 @@ int main(void)
* handle. normally you would set the ssl ctx function before making
* any transfers, and not use this option.
*/
rv = curl_easy_setopt(ch, CURLOPT_FRESH_CONNECT, 1L);
curl_easy_setopt(ch, CURLOPT_FRESH_CONNECT, 1L);
/* second try: retrieve page using cacerts' certificate -> will succeed
* load the certificate by installing a function doing the necessary
* "modifications" to the SSL CONTEXT just before link init
*/
rv = curl_easy_setopt(ch, CURLOPT_SSL_CTX_FUNCTION, *sslctx_function);
curl_easy_setopt(ch, CURLOPT_SSL_CTX_FUNCTION, *sslctx_function);
rv = curl_easy_perform(ch);
if(rv == CURLE_OK)
printf("*** transfer succeeded ***\n");

View file

@@ -81,7 +81,7 @@ CURL *make_handle(char *url)
curl_easy_setopt(handle, CURLOPT_PRIVATE, mem);
/* For completeness */
curl_easy_setopt(handle, CURLOPT_ENCODING, "gzip, deflate");
curl_easy_setopt(handle, CURLOPT_ACCEPT_ENCODING, "");
curl_easy_setopt(handle, CURLOPT_TIMEOUT, 5L);
curl_easy_setopt(handle, CURLOPT_FOLLOWLOCATION, 1L);
curl_easy_setopt(handle, CURLOPT_MAXREDIRS, 10L);
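A hedged aside on this one-line change: CURLOPT_ENCODING is the pre-7.21.6 name for CURLOPT_ACCEPT_ENCODING, and, as the libcurl documentation describes it, an empty string asks libcurl to offer every compression method it was built with and to decode the response transparently. "handle" below is the same easy handle as above.

/* offer all built-in encodings (gzip, deflate, br, ... depending on the build)
   and let libcurl decompress the body automatically */
curl_easy_setopt(handle, CURLOPT_ACCEPT_ENCODING, "");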

View file

@@ -45,14 +45,12 @@ int my_progress_func(GtkWidget *bar,
void *my_thread(void *ptr)
{
CURL *curl;
CURLcode res;
FILE *outfile;
gchar *url = ptr;
curl = curl_easy_init();
if(curl) {
gchar *url = ptr;
const char *filename = "test.curl";
outfile = fopen(filename, "wb");
FILE *outfile = fopen(filename, "wb");
curl_easy_setopt(curl, CURLOPT_URL, url);
curl_easy_setopt(curl, CURLOPT_WRITEDATA, outfile);
@@ -62,7 +60,7 @@ void *my_thread(void *ptr)
curl_easy_setopt(curl, CURLOPT_PROGRESSFUNCTION, my_progress_func);
curl_easy_setopt(curl, CURLOPT_PROGRESSDATA, Bar);
res = curl_easy_perform(curl);
curl_easy_perform(curl);
fclose(outfile);
/* always cleanup */

View file

@@ -277,7 +277,7 @@ int main(int argc, char **argv)
int tabLength = 100;
char *binaryptr;
char *mimetype;
char *mimetype = NULL;
char *mimetypeaccept = NULL;
char *contenttype;
const char **pp;
@@ -294,7 +294,7 @@ int main(int argc, char **argv)
binaryptr = malloc(tabLength);
p.verbose = 0;
memset(&p, '\0', sizeof(p));
p.errorbio = BIO_new_fp(stderr, BIO_NOCLOSE);
curl_global_init(CURL_GLOBAL_DEFAULT);
@@ -372,7 +372,7 @@ int main(int argc, char **argv)
args++;
}
if(mimetype == NULL || mimetypeaccept == NULL)
if(mimetype == NULL || mimetypeaccept == NULL || p.p12file == NULL)
badarg = 1;
if(badarg) {
@@ -544,7 +544,7 @@ int main(int argc, char **argv)
BIO_printf(p.errorbio, "%d %s %d\n", __LINE__, "curl_easy_perform",
res = curl_easy_perform(p.curl));
{
int result = curl_easy_getinfo(p.curl, CURLINFO_CONTENT_TYPE, &response);
curl_easy_getinfo(p.curl, CURLINFO_CONTENT_TYPE, &response);
if(mimetypeaccept && p.verbose) {
if(!strcmp(mimetypeaccept, response))
BIO_printf(p.errorbio, "the response has a correct mimetype : %s\n",

View file

@@ -5,7 +5,7 @@
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2018, Daniel Stenberg, <daniel@haxx.se>, et al.
* Copyright (C) 1998 - 2019, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
@@ -72,13 +72,6 @@ callback.
#include <unistd.h>
#include <curl/curl.h>
#include <curl/multi.h>
#ifdef __GNUC__
#define _Unused __attribute__((unused))
#else
#define _Unused
#endif
#define MSG_OUT stdout /* Send info to stdout, change to stderr if you want */
@@ -115,7 +108,7 @@ typedef struct _SockInfo
GlobalInfo *global;
} SockInfo;
#define __case(code) \
#define mycase(code) \
case code: s = __STRING(code)
/* Die if we get a bad CURLMcode somewhere */
@@ -124,14 +117,14 @@ static void mcode_or_die(const char *where, CURLMcode code)
if(CURLM_OK != code) {
const char *s;
switch(code) {
__case(CURLM_BAD_HANDLE); break;
__case(CURLM_BAD_EASY_HANDLE); break;
__case(CURLM_OUT_OF_MEMORY); break;
__case(CURLM_INTERNAL_ERROR); break;
__case(CURLM_UNKNOWN_OPTION); break;
__case(CURLM_LAST); break;
mycase(CURLM_BAD_HANDLE); break;
mycase(CURLM_BAD_EASY_HANDLE); break;
mycase(CURLM_OUT_OF_MEMORY); break;
mycase(CURLM_INTERNAL_ERROR); break;
mycase(CURLM_UNKNOWN_OPTION); break;
mycase(CURLM_LAST); break;
default: s = "CURLM_unknown"; break;
__case(CURLM_BAD_SOCKET);
mycase(CURLM_BAD_SOCKET);
fprintf(MSG_OUT, "ERROR: %s returns %s\n", where, s);
/* ignore this error */
return;
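A small self-contained sketch of the same trick, assuming only <curl/multi.h>: the standard # operator stringifies the macro argument, so glibc's __STRING is not required, and the break is folded into the macro here (a variant of what the example does). The rename away from __case matters because identifiers that begin with two underscores, or an underscore and an uppercase letter, are reserved for the implementation in C.

#include <stdio.h>
#include <curl/multi.h>

#define mycase(code) case code: s = #code; break

static void print_mcode(CURLMcode code)
{
  const char *s = "CURLM_unknown";
  switch(code) {
    mycase(CURLM_OK);
    mycase(CURLM_BAD_HANDLE);
    mycase(CURLM_BAD_EASY_HANDLE);
    mycase(CURLM_OUT_OF_MEMORY);
    default: break;
  }
  printf("%s\n", s);
}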
@@ -149,27 +142,29 @@ static void timer_cb(GlobalInfo* g, int revents);
static int multi_timer_cb(CURLM *multi, long timeout_ms, GlobalInfo *g)
{
struct itimerspec its;
CURLMcode rc;
fprintf(MSG_OUT, "multi_timer_cb: Setting timeout to %ld ms\n", timeout_ms);
timerfd_settime(g->tfd, /*flags=*/0, &its, NULL);
if(timeout_ms > 0) {
its.it_interval.tv_sec = 1;
its.it_interval.tv_nsec = 0;
its.it_value.tv_sec = timeout_ms / 1000;
its.it_value.tv_nsec = (timeout_ms % 1000) * 1000;
timerfd_settime(g->tfd, /*flags=*/0, &its, NULL);
its.it_value.tv_nsec = (timeout_ms % 1000) * 1000 * 1000;
}
else if(timeout_ms == 0) {
rc = curl_multi_socket_action(g->multi,
CURL_SOCKET_TIMEOUT, 0, &g->still_running);
mcode_or_die("multi_timer_cb: curl_multi_socket_action", rc);
/* libcurl wants us to timeout now, however setting both fields of
* new_value.it_value to zero disarms the timer. The closest we can
* do is to schedule the timer to fire in 1 ns. */
its.it_interval.tv_sec = 1;
its.it_interval.tv_nsec = 0;
its.it_value.tv_sec = 0;
its.it_value.tv_nsec = 1;
}
else {
memset(&its, 0, sizeof(struct itimerspec));
timerfd_settime(g->tfd, /*flags=*/0, &its, NULL);
}
timerfd_settime(g->tfd, /*flags=*/0, &its, NULL);
return 0;
}
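A hedged sketch of the timeout rules this hunk implements for a timerfd (the example additionally programs a one-second it_interval, omitted here): a positive timeout_ms arms the timer normally, 0 cannot be written as {0,0} because that disarms a timerfd, so the closest substitute is a 1 ns expiry, and a negative value (-1) disarms it. The helper name is illustrative.

#include <string.h>
#include <sys/timerfd.h>

static void arm_timerfd(int tfd, long timeout_ms)
{
  struct itimerspec its;
  memset(&its, 0, sizeof(its));
  if(timeout_ms > 0) {
    its.it_value.tv_sec = timeout_ms / 1000;
    its.it_value.tv_nsec = (timeout_ms % 1000) * 1000 * 1000;
  }
  else if(timeout_ms == 0) {
    its.it_value.tv_nsec = 1;   /* fire "immediately"; {0,0} would disarm */
  }
  /* timeout_ms < 0: leave it_value zeroed, which disarms the timer */
  timerfd_settime(tfd, 0, &its, NULL);
}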
@@ -206,8 +201,8 @@ static void event_cb(GlobalInfo *g, int fd, int revents)
CURLMcode rc;
struct itimerspec its;
int action = (revents & EPOLLIN ? CURL_POLL_IN : 0) |
(revents & EPOLLOUT ? CURL_POLL_OUT : 0);
int action = ((revents & EPOLLIN) ? CURL_CSELECT_IN : 0) |
((revents & EPOLLOUT) ? CURL_CSELECT_OUT : 0);
rc = curl_multi_socket_action(g->multi, fd, action, &g->still_running);
mcode_or_die("event_cb: curl_multi_socket_action", rc);
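A hedged note on the constant swap in this hunk: CURL_POLL_* values are what libcurl hands to the socket callback, while curl_multi_socket_action() expects CURL_CSELECT_* event bits, so epoll's revents should map onto the latter. A minimal helper, with an illustrative name:

#include <sys/epoll.h>
#include <curl/curl.h>

static int epoll_to_cselect(int revents)
{
  return ((revents & EPOLLIN)  ? CURL_CSELECT_IN  : 0) |
         ((revents & EPOLLOUT) ? CURL_CSELECT_OUT : 0) |
         ((revents & EPOLLERR) ? CURL_CSELECT_ERR : 0);
}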
@@ -272,8 +267,8 @@ static void setsock(SockInfo *f, curl_socket_t s, CURL *e, int act,
GlobalInfo *g)
{
struct epoll_event ev;
int kind = (act & CURL_POLL_IN ? EPOLLIN : 0) |
(act & CURL_POLL_OUT ? EPOLLOUT : 0);
int kind = ((act & CURL_POLL_IN) ? EPOLLIN : 0) |
((act & CURL_POLL_OUT) ? EPOLLOUT : 0);
if(f->sockfd) {
if(epoll_ctl(g->epfd, EPOLL_CTL_DEL, f->sockfd, NULL))
@@ -335,21 +330,21 @@ static int sock_cb(CURL *e, curl_socket_t s, int what, void *cbp, void *sockp)
/* CURLOPT_WRITEFUNCTION */
static size_t write_cb(void *ptr _Unused, size_t size, size_t nmemb,
void *data)
static size_t write_cb(void *ptr, size_t size, size_t nmemb, void *data)
{
size_t realsize = size * nmemb;
ConnInfo *conn _Unused = (ConnInfo*) data;
return realsize;
(void)ptr;
(void)data;
return size * nmemb;
}
/* CURLOPT_PROGRESSFUNCTION */
static int prog_cb(void *p, double dltotal, double dlnow, double ult _Unused,
double uln _Unused)
static int prog_cb(void *p, double dltotal, double dlnow, double ult,
double uln)
{
ConnInfo *conn = (ConnInfo *)p;
(void)ult;
(void)uln;
fprintf(MSG_OUT, "Progress: %s (%g/%g)\n", conn->url, dlnow, dltotal);
return 0;
@@ -467,14 +462,14 @@ void SignalHandler(int signo)
}
}
int main(int argc _Unused, char **argv _Unused)
int main(int argc, char **argv)
{
GlobalInfo g;
int err;
int idx;
struct itimerspec its;
struct epoll_event ev;
struct epoll_event events[10];
(void)argc;
(void)argv;
g_should_exit_ = 0;
signal(SIGINT, SignalHandler);
@@ -516,11 +511,9 @@ int main(int argc _Unused, char **argv _Unused)
fprintf(MSG_OUT, "Entering wait loop\n");
fflush(MSG_OUT);
while(!g_should_exit_) {
/* TODO(josh): use epoll_pwait to avoid a race on the signal. Mask the
* signal before the while loop, and then re-enable the signal during
* epoll wait. Mask at the end of the loop. */
err = epoll_wait(g.epfd, events, sizeof(events)/sizeof(struct epoll_event),
10000);
int idx;
int err = epoll_wait(g.epfd, events,
sizeof(events)/sizeof(struct epoll_event), 10000);
if(err == -1) {
if(errno == EINTR) {
fprintf(MSG_OUT, "note: wait interrupted\n");
@@ -549,5 +542,6 @@ int main(int argc _Unused, char **argv _Unused)
fflush(MSG_OUT);
curl_multi_cleanup(g.multi);
clean_fifo(&g);
return 0;
}

View file

@@ -5,7 +5,7 @@
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2017, Daniel Stenberg, <daniel@haxx.se>, et al.
* Copyright (C) 1998 - 2019, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
@@ -119,13 +119,12 @@ static int multi_timer_cb(CURLM *multi, long timeout_ms, GlobalInfo *g)
{
DPRINT("%s %li\n", __PRETTY_FUNCTION__, timeout_ms);
ev_timer_stop(g->loop, &g->timer_event);
if(timeout_ms > 0) {
if(timeout_ms >= 0) {
/* -1 means delete, other values are timeout times in milliseconds */
double t = timeout_ms / 1000;
ev_timer_init(&g->timer_event, timer_cb, t, 0.);
ev_timer_start(g->loop, &g->timer_event);
}
else if(timeout_ms == 0)
timer_cb(g->loop, &g->timer_event, 0);
return 0;
}
@@ -204,8 +203,8 @@ static void event_cb(EV_P_ struct ev_io *w, int revents)
GlobalInfo *g = (GlobalInfo*) w->data;
CURLMcode rc;
int action = (revents&EV_READ?CURL_POLL_IN:0)|
(revents&EV_WRITE?CURL_POLL_OUT:0);
int action = ((revents & EV_READ) ? CURL_POLL_IN : 0) |
((revents & EV_WRITE) ? CURL_POLL_OUT : 0);
rc = curl_multi_socket_action(g->multi, w->fd, action, &g->still_running);
mcode_or_die("event_cb: curl_multi_socket_action", rc);
check_multi_info(g);
@@ -248,7 +247,8 @@ static void setsock(SockInfo *f, curl_socket_t s, CURL *e, int act,
{
printf("%s \n", __PRETTY_FUNCTION__);
int kind = (act&CURL_POLL_IN?EV_READ:0)|(act&CURL_POLL_OUT?EV_WRITE:0);
int kind = ((act & CURL_POLL_IN) ? EV_READ : 0) |
((act & CURL_POLL_OUT) ? EV_WRITE : 0);
f->sockfd = s;
f->action = act;
@@ -422,7 +422,6 @@ static int init_fifo(GlobalInfo *g)
int main(int argc, char **argv)
{
GlobalInfo g;
CURLMcode rc;
(void)argc;
(void)argv;

View file

@@ -5,7 +5,7 @@
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2017, Daniel Stenberg, <daniel@haxx.se>, et al.
* Copyright (C) 1998 - 2019, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
@@ -124,8 +124,10 @@ int main(void)
servaddr.sin_port = htons(PORTNUM);
servaddr.sin_addr.s_addr = inet_addr(IPADDR);
if(INADDR_NONE == servaddr.sin_addr.s_addr)
if(INADDR_NONE == servaddr.sin_addr.s_addr) {
close(sockfd);
return 2;
}
if(connect(sockfd, (struct sockaddr *) &servaddr, sizeof(servaddr)) ==
-1) {
@@ -157,10 +159,16 @@ int main(void)
curl_easy_cleanup(curl);
close(sockfd);
if(res) {
printf("libcurl error: %d\n", res);
return 4;
}
}
#ifdef WIN32
WSACleanup();
#endif
return 0;
}

View file

@@ -211,7 +211,7 @@ static int fill_buffer(URL_FILE *file, size_t want)
static int use_buffer(URL_FILE *file, size_t want)
{
/* sort out buffer */
if((file->buffer_pos - want) <= 0) {
if(file->buffer_pos <= want) {
/* ditch buffer - write will recreate */
free(file->buffer);
file->buffer = NULL;
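Why the rewritten test is the safer one, assuming buffer_pos and want are unsigned (size_t): an unsigned difference never goes negative, it wraps around, so the old "(buffer_pos - want) <= 0" only held when the two values were exactly equal. A tiny standalone illustration:

#include <stdio.h>

int main(void)
{
  size_t pos = 3, want = 10;
  if((pos - want) <= 0)   /* wraps to a huge value, so this is NOT taken */
    puts("old test would ditch the buffer");
  if(pos <= want)         /* taken: want covers everything buffered */
    puts("new test ditches the buffer");
  return 0;
}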

View file

@@ -5,7 +5,7 @@
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2016, Daniel Stenberg, <daniel@haxx.se>, et al.
* Copyright (C) 1998 - 2019, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
@@ -41,8 +41,6 @@ static size_t write_it(char *buff, size_t size, size_t nmemb,
int main(int argc, char **argv)
{
int rc = CURLE_OK;
/* curl easy handle */
CURL *handle;
@@ -50,7 +48,7 @@ int main(int argc, char **argv)
struct callback_data data = { 0 };
/* global initialization */
rc = curl_global_init(CURL_GLOBAL_ALL);
int rc = curl_global_init(CURL_GLOBAL_ALL);
if(rc)
return rc;

View file

@@ -5,7 +5,7 @@
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2017, Daniel Stenberg, <daniel@haxx.se>, et al.
* Copyright (C) 1998 - 2019, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
@@ -36,7 +36,7 @@ struct FtpFile {
static size_t my_fwrite(void *buffer, size_t size, size_t nmemb, void *stream)
{
struct FtpFile *out = (struct FtpFile *)stream;
if(out && !out->stream) {
if(!out->stream) {
/* open file for writing */
out->stream = fopen(out->filename, "wb");
if(!out->stream)

View file

@@ -5,7 +5,7 @@
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2016, Daniel Stenberg, <daniel@haxx.se>, et al.
* Copyright (C) 1998 - 2019, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
@@ -56,7 +56,6 @@ int main(void)
curl_easy_setopt(curl, CURLOPT_NOBODY, 1L);
/* Ask for filetime */
curl_easy_setopt(curl, CURLOPT_FILETIME, 1L);
/* No header output: TODO 14.1 http-style HEAD output for ftp */
curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, throw_away);
curl_easy_setopt(curl, CURLOPT_HEADER, 0L);
/* Switch on full protocol/debug output */
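For context, a hedged sketch that is not part of the hunk above: with CURLOPT_NOBODY and CURLOPT_FILETIME set, the remote file time (when the server provides one) can be read back after the transfer as a unix timestamp via CURLINFO_FILETIME. The helper name is illustrative.

#include <stdio.h>
#include <time.h>
#include <curl/curl.h>

/* call after curl_easy_perform() succeeded on the handle configured above */
static void print_filetime(CURL *curl)
{
  long filetime = -1;
  if(curl_easy_getinfo(curl, CURLINFO_FILETIME, &filetime) == CURLE_OK &&
     filetime >= 0) {
    time_t t = (time_t)filetime;
    printf("remote file time: %s", ctime(&t));
  }
  else
    printf("remote file time not available\n");
}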

View file

@@ -5,7 +5,7 @@
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2015, 2017, Daniel Stenberg, <daniel@haxx.se>, et al.
* Copyright (C) 1998 - 2019, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
@@ -38,7 +38,7 @@ static size_t my_fwrite(void *buffer, size_t size, size_t nmemb,
void *stream)
{
struct FtpFile *out = (struct FtpFile *)stream;
if(out && !out->stream) {
if(!out->stream) {
/* open file for writing */
out->stream = fopen(out->filename, "wb");
if(!out->stream)

View file

@@ -5,7 +5,7 @@
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2017, Daniel Stenberg, <daniel@haxx.se>, et al.
* Copyright (C) 1998 - 2019, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
@@ -163,16 +163,14 @@ static int update_timeout_cb(CURLM *multi, long timeout_ms, void *userp)
MSG_OUT("*** update_timeout_cb %ld => %ld:%ld ***\n",
timeout_ms, timeout.tv_sec, timeout.tv_usec);
/* TODO
*
* if timeout_ms is 0, call curl_multi_socket_action() at once!
*
/*
* if timeout_ms is -1, just delete the timer
*
* for all other values of timeout_ms, this should set or *update*
* the timer to the new value
* For other values of timeout_ms, this should set or *update* the timer to
* the new value
*/
g->timer_event = g_timeout_add(timeout_ms, timer_cb, g);
if(timeout_ms >= 0)
g->timer_event = g_timeout_add(timeout_ms, timer_cb, g);
return 0;
}
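A hedged GLib-flavoured sketch of the same timer contract -- not what the example does, just one defensive variant: cancel any previously scheduled source with g_source_remove() before re-arming, treat -1 as "delete the timer", and drive libcurl with CURL_SOCKET_TIMEOUT when the timeout fires. All names here are illustrative.

#include <glib.h>
#include <curl/curl.h>

static guint timer_id;   /* id of the pending g_timeout source, 0 = none */

static gboolean on_timeout(gpointer userp)
{
  int still_running;
  timer_id = 0;
  curl_multi_socket_action((CURLM *)userp, CURL_SOCKET_TIMEOUT, 0,
                           &still_running);
  return G_SOURCE_REMOVE;   /* one-shot timer, do not reschedule */
}

static int glib_timer_cb(CURLM *multi, long timeout_ms, void *userp)
{
  (void)userp;
  if(timer_id) {
    g_source_remove(timer_id);   /* drop a previously scheduled timeout */
    timer_id = 0;
  }
  if(timeout_ms >= 0)            /* -1 means "delete the timer" */
    timer_id = g_timeout_add((guint)timeout_ms, on_timeout, multi);
  return 0;
}

/* registered with: curl_multi_setopt(multi, CURLMOPT_TIMERFUNCTION, glib_timer_cb); */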
@@ -184,8 +182,8 @@ static gboolean event_cb(GIOChannel *ch, GIOCondition condition, gpointer data)
int fd = g_io_channel_unix_get_fd(ch);
int action =
(condition & G_IO_IN ? CURL_CSELECT_IN : 0) |
(condition & G_IO_OUT ? CURL_CSELECT_OUT : 0);
((condition & G_IO_IN) ? CURL_CSELECT_IN : 0) |
((condition & G_IO_OUT) ? CURL_CSELECT_OUT : 0);
rc = curl_multi_socket_action(g->multi, fd, action, &g->still_running);
mcode_or_die("event_cb: curl_multi_socket_action", rc);
@@ -220,7 +218,8 @@ static void setsock(SockInfo *f, curl_socket_t s, CURL *e, int act,
GlobalInfo *g)
{
GIOCondition kind =
(act&CURL_POLL_IN?G_IO_IN:0)|(act&CURL_POLL_OUT?G_IO_OUT:0);
((act & CURL_POLL_IN) ? G_IO_IN : 0) |
((act & CURL_POLL_OUT) ? G_IO_OUT : 0);
f->sockfd = s;
f->action = act;
@@ -257,8 +256,8 @@ static int sock_cb(CURL *e, curl_socket_t s, int what, void *cbp, void *sockp)
else {
if(!fdp) {
MSG_OUT("Adding data: %s%s\n",
what&CURL_POLL_IN?"READ":"",
what&CURL_POLL_OUT?"WRITE":"");
(what & CURL_POLL_IN) ? "READ" : "",
(what & CURL_POLL_OUT) ? "WRITE" : "");
addsock(s, e, what, g);
}
else {
@@ -413,7 +412,6 @@ int init_fifo(void)
int main(int argc, char **argv)
{
GlobalInfo *g;
CURLMcode rc;
GMainLoop*gmain;
int fd;
GIOChannel* ch;

Some files were not shown because too many files have changed in this diff