summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.github/workflows/http3-linux.yml34
-rw-r--r--.github/workflows/linux-old.yml3
-rw-r--r--.github/workflows/linux.yml22
-rw-r--r--.github/workflows/macos.yml6
-rw-r--r--.github/workflows/non-native.yml12
-rw-r--r--.github/workflows/windows.yml22
-rw-r--r--appveyor.sh9
-rw-r--r--docs/examples/CMakeLists.txt39
-rw-r--r--lib/vtls/openssl.c4
-rw-r--r--lib/vtls/schannel.c1
-rw-r--r--src/tool_cfgable.c1
-rw-r--r--src/tool_cfgable.h9
-rw-r--r--src/tool_operate.c398
-rw-r--r--src/tool_operate.h2
-rw-r--r--src/tool_operhlp.c2
-rw-r--r--src/tool_parsecfg.c107
-rw-r--r--src/tool_writeout.c2
-rw-r--r--tests/data/test4594
18 files changed, 333 insertions, 344 deletions
diff --git a/.github/workflows/http3-linux.yml b/.github/workflows/http3-linux.yml
index 99e99332a..3fec31519 100644
--- a/.github/workflows/http3-linux.yml
+++ b/.github/workflows/http3-linux.yml
@@ -56,7 +56,7 @@ env:
# renovate: datasource=github-tags depName=nghttp2/nghttp2 versioning=semver registryUrl=https://github.com
NGHTTP2_VERSION: 1.66.0
# renovate: datasource=github-tags depName=cloudflare/quiche versioning=semver registryUrl=https://github.com
- QUICHE_VERSION: 0.24.4
+ QUICHE_VERSION: 0.24.5
jobs:
build-cache:
@@ -65,7 +65,7 @@ jobs:
steps:
- name: 'cache openssl'
- uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4
+ uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
id: cache-openssl-http3
env:
cache-name: cache-openssl-http3
@@ -74,7 +74,7 @@ jobs:
key: ${{ runner.os }}-http3-build-${{ env.cache-name }}-${{ env.OPENSSL_VERSION }}
- name: 'cache quictls'
- uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4
+ uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
id: cache-quictls-no-deprecated
env:
cache-name: cache-quictls-no-deprecated
@@ -83,7 +83,7 @@ jobs:
key: ${{ runner.os }}-http3-build-${{ env.cache-name }}-${{ env.QUICTLS_VERSION }}-quic1
- name: 'cache gnutls'
- uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4
+ uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
id: cache-gnutls
env:
cache-name: cache-gnutls
@@ -92,7 +92,7 @@ jobs:
key: ${{ runner.os }}-http3-build-${{ env.cache-name }}-${{ env.GNUTLS_VERSION }}
- name: 'cache wolfssl'
- uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4
+ uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
id: cache-wolfssl
env:
cache-name: cache-wolfssl
@@ -101,7 +101,7 @@ jobs:
key: ${{ runner.os }}-http3-build-${{ env.cache-name }}-${{ env.WOLFSSL_VERSION }}
- name: 'cache nghttp3'
- uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4
+ uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
id: cache-nghttp3
env:
cache-name: cache-nghttp3
@@ -110,7 +110,7 @@ jobs:
key: ${{ runner.os }}-http3-build-${{ env.cache-name }}-${{ env.NGHTTP3_VERSION }}
- name: 'cache ngtcp2'
- uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4
+ uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
id: cache-ngtcp2
env:
cache-name: cache-ngtcp2
@@ -119,7 +119,7 @@ jobs:
key: ${{ runner.os }}-http3-build-${{ env.cache-name }}-${{ env.NGTCP2_VERSION }}-${{ env.OPENSSL_VERSION }}-${{ env.QUICTLS_VERSION }}-${{ env.GNUTLS_VERSION }}-${{ env.WOLFSSL_VERSION }}
- name: 'cache nghttp2'
- uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4
+ uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
id: cache-nghttp2
env:
cache-name: cache-nghttp2
@@ -350,7 +350,7 @@ jobs:
- name: 'cache openssl'
if: ${{ matrix.build.name == 'openssl' || matrix.build.name == 'openssl-quic' }}
- uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4
+ uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
id: cache-openssl-http3
env:
cache-name: cache-openssl-http3
@@ -360,7 +360,7 @@ jobs:
fail-on-cache-miss: true
- name: 'cache quictls'
- uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4
+ uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
id: cache-quictls-no-deprecated
env:
cache-name: cache-quictls-no-deprecated
@@ -371,7 +371,7 @@ jobs:
- name: 'cache gnutls'
if: ${{ matrix.build.name == 'gnutls' }}
- uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4
+ uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
id: cache-gnutls
env:
cache-name: cache-gnutls
@@ -382,7 +382,7 @@ jobs:
- name: 'cache wolfssl'
if: ${{ matrix.build.name == 'wolfssl' }}
- uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4
+ uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
id: cache-wolfssl
env:
cache-name: cache-wolfssl
@@ -392,7 +392,7 @@ jobs:
fail-on-cache-miss: true
- name: 'cache nghttp3'
- uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4
+ uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
id: cache-nghttp3
env:
cache-name: cache-nghttp3
@@ -402,7 +402,7 @@ jobs:
fail-on-cache-miss: true
- name: 'cache ngtcp2'
- uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4
+ uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
id: cache-ngtcp2
env:
cache-name: cache-ngtcp2
@@ -412,7 +412,7 @@ jobs:
fail-on-cache-miss: true
- name: 'cache nghttp2'
- uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4
+ uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
id: cache-nghttp2
env:
cache-name: cache-nghttp2
@@ -423,7 +423,7 @@ jobs:
- name: 'cache quiche'
if: ${{ matrix.build.name == 'quiche' }}
- uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4
+ uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
id: cache-quiche
env:
cache-name: cache-quiche
@@ -546,7 +546,7 @@ jobs:
- name: 'build examples'
run: |
if [ "${MATRIX_BUILD}" = 'cmake' ]; then
- cmake --build bld --verbose --target curl-examples
+ cmake --build bld --verbose --target curl-examples-build
else
make -C bld V=1 examples
fi
diff --git a/.github/workflows/linux-old.yml b/.github/workflows/linux-old.yml
index 5b8f92f03..736a3cc10 100644
--- a/.github/workflows/linux-old.yml
+++ b/.github/workflows/linux-old.yml
@@ -123,6 +123,9 @@ jobs:
- name: 'cmake run tests'
run: make -C bld-cares test-ci
+ - name: 'cmake build examples'
+ run: make -C bld-cares curl-examples-build
+
- name: 'autoreconf'
run: autoreconf -if
diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml
index c0be95f43..7c64a2355 100644
--- a/.github/workflows/linux.yml
+++ b/.github/workflows/linux.yml
@@ -325,7 +325,7 @@ jobs:
- name: 'cache libressl'
if: ${{ contains(matrix.build.install_steps, 'libressl') }}
- uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4
+ uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
id: cache-libressl
env:
cache-name: cache-libressl
@@ -344,7 +344,7 @@ jobs:
- name: 'cache wolfssl (all)'
if: ${{ contains(matrix.build.install_steps, 'wolfssl-all') }}
- uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4
+ uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
id: cache-wolfssl-all
env:
cache-name: cache-wolfssl-all
@@ -365,7 +365,7 @@ jobs:
- name: 'cache wolfssl (opensslextra)' # does support `OPENSSL_COEXIST`
if: ${{ contains(matrix.build.install_steps, 'wolfssl-opensslextra') }}
- uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4
+ uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
id: cache-wolfssl-opensslextra
env:
cache-name: cache-wolfssl-opensslextra
@@ -386,7 +386,7 @@ jobs:
- name: 'cache wolfssh'
if: ${{ contains(matrix.build.install_steps, 'wolfssh') }}
- uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4
+ uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
id: cache-wolfssh
env:
cache-name: cache-wolfssh
@@ -407,7 +407,7 @@ jobs:
- name: 'cache mbedtls'
if: ${{ contains(matrix.build.install_steps, 'mbedtls') }}
- uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4
+ uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
id: cache-mbedtls
env:
cache-name: cache-mbedtls-threadsafe
@@ -430,7 +430,7 @@ jobs:
- name: 'cache openldap-static'
if: ${{ contains(matrix.build.install_steps, 'openldap-static') }}
- uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4
+ uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
id: cache-openldap-static
env:
cache-name: cache-openldap-static
@@ -450,7 +450,7 @@ jobs:
- name: 'cache openssl (thread sanitizer)'
if: ${{ contains(matrix.build.install_steps, 'openssl-tsan') }}
- uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4
+ uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
id: cache-openssl-tsan
env:
cache-name: cache-openssl-tsan
@@ -469,7 +469,7 @@ jobs:
- name: 'cache quictls'
if: ${{ contains(matrix.build.install_steps, 'quictls') }}
- uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4
+ uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
id: cache-quictls
env:
cache-name: cache-quictls
@@ -488,7 +488,7 @@ jobs:
- name: 'cache awslc'
if: ${{ contains(matrix.build.install_steps, 'awslc') }}
- uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4
+ uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
id: cache-awslc
env:
cache-name: cache-awslc
@@ -509,7 +509,7 @@ jobs:
- name: 'cache rustls'
if: ${{ contains(matrix.build.install_steps, 'rustls') }}
- uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4
+ uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
id: cache-rustls
env:
cache-name: cache-rustls
@@ -698,7 +698,7 @@ jobs:
if: ${{ matrix.build.make-custom-target != 'tidy' }}
run: |
if [ "${MATRIX_BUILD}" = 'cmake' ]; then
- ${MATRIX_MAKE_PREFIX} cmake --build bld --verbose --target curl-examples
+ ${MATRIX_MAKE_PREFIX} cmake --build bld --verbose --target curl-examples-build
else
${MATRIX_MAKE_PREFIX} make -C bld V=1 examples
fi
diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml
index cdb6de283..90d04fab1 100644
--- a/.github/workflows/macos.yml
+++ b/.github/workflows/macos.yml
@@ -111,7 +111,7 @@ jobs:
- name: 'cache libressl'
if: ${{ contains(matrix.build.install_steps, 'libressl') }}
- uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4
+ uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
id: cache-libressl
env:
cache-name: cache-libressl
@@ -199,7 +199,7 @@ jobs:
- name: 'build examples'
run: |
if [ "${MATRIX_BUILD}" = 'cmake' ]; then
- cmake --build bld ${MATRIX_OPTIONS} --parallel 4 --target curl-examples --verbose
+ cmake --build bld ${MATRIX_OPTIONS} --parallel 4 --target curl-examples-build --verbose
else
make -C bld examples V=1
fi
@@ -520,7 +520,7 @@ jobs:
if: ${{ contains(matrix.build.name, '+examples') }}
run: |
if [ "${MATRIX_BUILD}" = 'cmake' ]; then
- cmake --build bld --verbose --target curl-examples
+ cmake --build bld --verbose --target curl-examples-build
else
make -C bld examples V=1
fi
diff --git a/.github/workflows/non-native.yml b/.github/workflows/non-native.yml
index e4c837654..e8a105e39 100644
--- a/.github/workflows/non-native.yml
+++ b/.github/workflows/non-native.yml
@@ -83,7 +83,7 @@ jobs:
time cmake --build bld --target test-ci
fi
echo '::group::build examples'
- time cmake --build bld --target curl-examples
+ time cmake --build bld --target curl-examples-build
echo '::endgroup::'
openbsd:
@@ -128,7 +128,7 @@ jobs:
time cmake --build bld --target test-ci
fi
echo '::group::build examples'
- time cmake --build bld --target curl-examples
+ time cmake --build bld --target curl-examples-build
echo '::endgroup::'
freebsd:
@@ -232,7 +232,7 @@ jobs:
if [ "${MATRIX_DESC#*!examples*}" = "${MATRIX_DESC}" ]; then
echo '::group::build examples'
if [ "${MATRIX_BUILD}" = 'cmake' ]; then
- time cmake --build bld --target curl-examples
+ time cmake --build bld --target curl-examples-build
else
time make -C bld examples
fi
@@ -361,7 +361,7 @@ jobs:
- name: 'build examples'
run: |
if [ "${MATRIX_BUILD}" = 'cmake' ]; then
- cmake --build bld --target curl-examples
+ cmake --build bld --target curl-examples-build
else
make -C bld examples
fi
@@ -385,7 +385,7 @@ jobs:
sudo apt-get -o Dpkg::Use-Pty=0 install libfl2
- name: 'cache compiler (djgpp)'
- uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4
+ uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
id: cache-compiler
with:
path: ~/djgpp
@@ -473,7 +473,7 @@ jobs:
if: ${{ matrix.build == 'cmake' }} # skip for autotools to save time
run: |
if [ "${MATRIX_BUILD}" = 'cmake' ]; then
- cmake --build bld --target curl-examples
+ cmake --build bld --target curl-examples-build
else
make -C bld examples
fi
diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml
index 812bc9b79..5d9aac713 100644
--- a/.github/workflows/windows.yml
+++ b/.github/workflows/windows.yml
@@ -177,13 +177,13 @@ jobs:
run: |
PATH=/usr/bin
if [ "${MATRIX_BUILD}" = 'cmake' ]; then
- cmake --build bld --verbose --target curl-examples
+ cmake --build bld --verbose --target curl-examples-build
else
make -C bld V=1 examples
fi
- name: 'disk space used'
- run: du -sh .; echo; du -sh -t 250KB ./*; echo; du -h -t 50KB bld
+ run: du -sh .; echo; du -sh -t 250KB ./*; echo; du -h -t 250KB bld
msys2: # both msys and mingw-w64
name: "${{ matrix.sys == 'msys' && 'msys2' || 'mingw' }}, ${{ matrix.build == 'cmake' && 'CM' || 'AM' }} ${{ matrix.env }} ${{ matrix.name }} ${{ matrix.test }}"
@@ -404,13 +404,13 @@ jobs:
timeout-minutes: 5
run: |
if [ "${MATRIX_BUILD}" = 'cmake' ]; then
- cmake --build bld --verbose --target curl-examples
+ cmake --build bld --verbose --target curl-examples-build
else
make -C bld V=1 examples
fi
- name: 'disk space used'
- run: du -sh .; echo; du -sh -t 250KB ./*; echo; du -h -t 50KB bld
+ run: du -sh .; echo; du -sh -t 250KB ./*; echo; du -h -t 250KB bld
mingw-w64-standalone-downloads:
name: 'dl-mingw, CM ${{ matrix.ver }}-${{ matrix.env }} ${{ matrix.name }}'
@@ -479,7 +479,7 @@ jobs:
${{ matrix.install }}
- name: 'cache compiler (gcc ${{ matrix.ver }}-${{ matrix.env }})'
- uses: actions/cache@d4323d4df104b026a6aa633fdb11d772146be0bf # v4
+ uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
id: cache-compiler
with:
path: D:\my-cache
@@ -589,10 +589,10 @@ jobs:
timeout-minutes: 5
run: |
PATH="/d/my-cache/${MATRIX_DIR}/bin:$PATH"
- cmake --build bld --target curl-examples
+ cmake --build bld --target curl-examples-build
- name: 'disk space used'
- run: du -sh .; echo; du -sh -t 250KB ./*; echo; du -h -t 50KB bld
+ run: du -sh .; echo; du -sh -t 250KB ./*; echo; du -h -t 250KB bld
linux-cross-mingw-w64:
name: "linux-mingw, ${{ matrix.build == 'cmake' && 'CM' || 'AM' }} ${{ matrix.compiler }}"
@@ -687,13 +687,13 @@ jobs:
if: ${{ matrix.compiler != 'clang-tidy' }} # Save time by skipping this for clang-tidy
run: |
if [ "${MATRIX_BUILD}" = 'cmake' ]; then
- cmake --build bld --target curl-examples
+ cmake --build bld --target curl-examples-build
else
make -C bld examples
fi
- name: 'disk space used'
- run: du -sh .; echo; du -sh -t 250KB ./*; echo; du -h -t 50KB bld
+ run: du -sh .; echo; du -sh -t 250KB ./*; echo; du -h -t 250KB bld
msvc:
name: 'msvc, CM ${{ matrix.arch }}-${{ matrix.plat }} ${{ matrix.name }}'
@@ -935,7 +935,7 @@ jobs:
- name: 'build examples'
timeout-minutes: 5
if: ${{ contains(matrix.name, '+examples') }}
- run: cmake --build bld --config "${MATRIX_TYPE}" --parallel 5 --target curl-examples
+ run: cmake --build bld --config "${MATRIX_TYPE}" --parallel 5 --target curl-examples-build
- name: 'disk space used'
- run: du -sh .; echo; du -sh -t 250KB ./*; echo; du -h -t 50KB bld
+ run: du -sh .; echo; du -sh -t 250KB ./*; echo; du -h -t 250KB bld
diff --git a/appveyor.sh b/appveyor.sh
index ee4deaf5c..e9871415e 100644
--- a/appveyor.sh
+++ b/appveyor.sh
@@ -163,5 +163,12 @@ fi
if [ "${EXAMPLES}" = 'ON' ] && \
[ "${BUILD_SYSTEM}" = 'CMake' ]; then
- time cmake --build _bld --config "${PRJ_CFG}" --parallel 2 --target curl-examples
+ time cmake --build _bld --config "${PRJ_CFG}" --parallel 2 --target curl-examples-build
+fi
+
+# disk space used
+
+du -sh .; echo; du -sh -t 250KB ./*
+if [ "${BUILD_SYSTEM}" = 'CMake' ]; then
+ echo; du -h -t 250KB _bld
fi
diff --git a/docs/examples/CMakeLists.txt b/docs/examples/CMakeLists.txt
index 0963c62d9..cb1d98389 100644
--- a/docs/examples/CMakeLists.txt
+++ b/docs/examples/CMakeLists.txt
@@ -28,11 +28,42 @@ add_custom_target(curl-examples)
curl_transform_makefile_inc("Makefile.inc" "${CMAKE_CURRENT_BINARY_DIR}/Makefile.inc.cmake")
include("${CMAKE_CURRENT_BINARY_DIR}/Makefile.inc.cmake")
-foreach(_target IN LISTS check_PROGRAMS)
+set(_all_canary "")
+set(_all "all")
+foreach(_target IN LISTS check_PROGRAMS _all) # keep '_all' last
set(_target_name "curl-example-${_target}")
- add_executable(${_target_name} EXCLUDE_FROM_ALL "${_target}.c")
- add_dependencies(curl-examples ${_target_name})
+ if(_target STREQUAL "all")
+ if(CMAKE_VERSION VERSION_GREATER_EQUAL 3.12)
+ set(_examples_c "${check_PROGRAMS}")
+ list(TRANSFORM _examples_c APPEND ".c")
+ add_library(${_target_name} OBJECT EXCLUDE_FROM_ALL ${_examples_c})
+ if(MSVC AND NOT CMAKE_C_COMPILER_ID STREQUAL "Clang")
+ # CMake generates a static library for the OBJECT target. Silence these 'lib.exe' warnings:
+ # warning LNK4006: main already defined in ....obj; second definition ignored
+ # warning LNK4221: This object file does not define any previously undefined public symbols,
+ # so it will not be used by any link operation that consumes this library
+ if(CMAKE_VERSION VERSION_GREATER_EQUAL 3.13)
+ set_target_properties(${_target_name} PROPERTIES STATIC_LIBRARY_OPTIONS "-ignore:4006;-ignore:4221")
+ else()
+ set_target_properties(${_target_name} PROPERTIES STATIC_LIBRARY_FLAGS "-ignore:4006 -ignore:4221")
+ endif()
+ endif()
+ else()
+ set(_examples_c "")
+ foreach(_src IN LISTS check_PROGRAMS)
+ list(APPEND _examples_c "${_src}.c")
+ endforeach()
+ add_library(${_target_name} STATIC EXCLUDE_FROM_ALL ${_examples_c})
+ endif()
+ add_custom_target(curl-examples-build) # Special target to compile all tests quickly and build a single test to probe linkage
+ add_dependencies(curl-examples-build ${_target_name} ${_all_canary}) # Include a full build of a single test
+ else()
+ set(_all_canary ${_target_name}) # Save the last test for the curl-examples-build target
+ add_executable(${_target_name} EXCLUDE_FROM_ALL "${_target}.c")
+ add_dependencies(curl-examples ${_target_name})
+ endif()
target_link_libraries(${_target_name} ${LIB_SELECTED} ${CURL_NETWORK_AND_TIME_LIBS})
- target_compile_definitions(${_target_name} PRIVATE "CURL_NO_OLDIES" "$<$<BOOL:${MSVC}>:_CRT_SECURE_NO_DEPRECATE>")
+ target_compile_definitions(${_target_name} PRIVATE "CURL_NO_OLDIES"
+ "$<$<BOOL:${WIN32}>:WIN32_LEAN_AND_MEAN>" "$<$<BOOL:${MSVC}>:_CRT_SECURE_NO_DEPRECATE>")
set_target_properties(${_target_name} PROPERTIES OUTPUT_NAME "${_target}" PROJECT_LABEL "Example ${_target}" UNITY_BUILD OFF)
endforeach()
diff --git a/lib/vtls/openssl.c b/lib/vtls/openssl.c
index a2ab831f2..dc4a6d122 100644
--- a/lib/vtls/openssl.c
+++ b/lib/vtls/openssl.c
@@ -3327,8 +3327,10 @@ static CURLcode import_windows_cert_store(struct Curl_easy *data,
continue;
x509 = d2i_X509(NULL, &encoded_cert, (long)pContext->cbCertEncoded);
- if(!x509)
+ if(!x509) {
+ ERR_clear_error();
continue;
+ }
/* Try to import the certificate. This may fail for legitimate
reasons such as duplicate certificate, which is allowed by MS but
diff --git a/lib/vtls/schannel.c b/lib/vtls/schannel.c
index 27aceaba8..0b3ec8cc2 100644
--- a/lib/vtls/schannel.c
+++ b/lib/vtls/schannel.c
@@ -730,6 +730,7 @@ schannel_acquire_credential_handle(struct Curl_cfilter *cf,
if(!client_certs[0]) {
/* CRYPT_E_NOT_FOUND / E_INVALIDARG */
CertCloseStore(cert_store, 0);
+ failf(data, "schannel: client cert not found in cert store");
return CURLE_SSL_CERTPROBLEM;
}
}
diff --git a/src/tool_cfgable.c b/src/tool_cfgable.c
index b6c8bf5c8..015ee26cb 100644
--- a/src/tool_cfgable.c
+++ b/src/tool_cfgable.c
@@ -52,6 +52,7 @@ struct OperationConfig *config_alloc(void)
config->ftp_skip_ip = TRUE;
config->file_clobber_mode = CLOBBER_DEFAULT;
config->upload_flags = CURLULFLAG_SEEN;
+ config->retry_delay_ms = RETRY_SLEEP_DEFAULT;
curlx_dyn_init(&config->postdata, MAX_FILE2MEMORY);
return config;
}
diff --git a/src/tool_cfgable.h b/src/tool_cfgable.h
index 40cc52b8d..f982d22d6 100644
--- a/src/tool_cfgable.h
+++ b/src/tool_cfgable.h
@@ -68,17 +68,15 @@ struct State {
struct getout *urlnode;
struct URLGlob inglob;
struct URLGlob urlglob;
- char *outfiles;
char *httpgetfields;
char *uploadfile;
- curl_off_t infilenum; /* number of files to upload */
- curl_off_t up; /* upload file counter within a single upload glob */
+ curl_off_t upnum; /* number of files to upload */
+ curl_off_t upidx; /* index for upload glob */
curl_off_t urlnum; /* how many iterations this URL has with ranges etc */
- curl_off_t li; /* index for globbed URLs */
+ curl_off_t urlidx; /* index for globbed URLs */
};
struct OperationConfig {
- struct State state; /* for create_transfer() */
struct dynbuf postdata;
char *useragent;
struct curl_slist *cookies; /* cookies to serialize into a single line */
@@ -342,6 +340,7 @@ struct OperationConfig {
};
struct GlobalConfig {
+ struct State state; /* for create_transfer() */
char *trace_dump; /* file to dump the network trace to */
FILE *trace_stream;
char *libcurl; /* Output libcurl code to this filename */
diff --git a/src/tool_operate.c b/src/tool_operate.c
index ab1e898c9..90e1fbf86 100644
--- a/src/tool_operate.c
+++ b/src/tool_operate.c
@@ -110,10 +110,6 @@ extern const unsigned char curl_ca_embed[];
"this situation and\nhow to fix it, please visit the webpage mentioned " \
"above.\n"
-static CURLcode single_transfer(struct OperationConfig *config,
- CURLSH *share,
- bool *added,
- bool *skipped);
static CURLcode create_transfer(CURLSH *share,
bool *added,
bool *skipped);
@@ -334,15 +330,11 @@ static CURLcode pre_transfer(struct per_transfer *per)
return result;
}
-void single_transfer_cleanup(struct OperationConfig *config)
+void single_transfer_cleanup(void)
{
- struct State *state;
- DEBUGASSERT(config);
-
- state = &config->state;
+ struct State *state = &global->state;
/* Free list of remaining URLs */
glob_cleanup(&state->urlglob);
- state->outfiles = NULL;
tool_safefree(state->uploadfile);
/* Free list of globbed upload files */
glob_cleanup(&state->inglob);
@@ -847,11 +839,8 @@ static CURLcode etag_store(struct OperationConfig *config,
if(strcmp(config->etag_save_file, "-")) {
FILE *newfile = fopen(config->etag_save_file, "ab");
if(!newfile) {
- struct State *state = &config->state;
warnf("Failed creating file for saving etags: \"%s\". "
"Skip this transfer", config->etag_save_file);
- state->outfiles = NULL;
- glob_cleanup(&state->urlglob);
*skip = TRUE;
return CURLE_OK;
}
@@ -931,7 +920,7 @@ static CURLcode setup_outfile(struct OperationConfig *config,
* We have specified a filename to store the result in, or we have
* decided we want to use the remote filename.
*/
- struct State *state = &config->state;
+ struct State *state = &global->state;
if(!per->outfile) {
/* extract the filename from the URL */
@@ -1085,14 +1074,12 @@ static void check_stdin_upload(struct OperationConfig *config,
/* create the next (singular) transfer */
static CURLcode single_transfer(struct OperationConfig *config,
- CURLSH *share,
- bool *added,
- bool *skipped)
+ CURLSH *share, bool *added, bool *skipped)
{
CURLcode result = CURLE_OK;
bool orig_noprogress = global->noprogress;
bool orig_isatty = global->isatty;
- struct State *state = &config->state;
+ struct State *state = &global->state;
char *httpgetfields = state->httpgetfields;
*skipped = *added = FALSE; /* not yet */
@@ -1119,41 +1106,26 @@ static CURLcode single_transfer(struct OperationConfig *config,
if(!state->urlnode) {
/* first time caller, setup things */
state->urlnode = config->url_list;
- state->infilenum = 1;
+ state->upnum = 1;
}
while(state->urlnode) {
+ struct per_transfer *per = NULL;
+ struct OutStruct *outs;
+ struct OutStruct *heads;
+ struct HdrCbData *hdrcbdata = NULL;
+ struct OutStruct etag_first;
+ CURL *curl;
struct getout *u = state->urlnode;
+ FILE *err = (!global->silent || global->showerror) ? tool_stderr : NULL;
- /* u->url is the full URL or NULL */
- if(!u->url) {
- /* This node has no URL. End of the road. */
- warnf("Got more output options than URLs");
- break;
- }
-
- /* save outfile pattern before expansion */
- if(u->outfile && !state->outfiles)
- state->outfiles = u->outfile;
-
- if(!config->globoff && u->infile && !glob_inuse(&state->inglob)) {
- /* Unless explicitly shut off */
- result = glob_url(&state->inglob, u->infile, &state->infilenum,
- (!global->silent || global->showerror) ?
- tool_stderr : NULL);
- if(result)
- return result;
- }
-
-
- if(state->up || u->infile) {
+ if(u->infile) {
+ if(!config->globoff && !glob_inuse(&state->inglob))
+ result = glob_url(&state->inglob, u->infile, &state->upnum, err);
if(!state->uploadfile) {
- if(glob_inuse(&state->inglob)) {
+ if(glob_inuse(&state->inglob))
result = glob_next_url(&state->uploadfile, &state->inglob);
- if(result == CURLE_OUT_OF_MEMORY)
- errorf("out of memory");
- }
- else if(!state->up) {
+ else if(!state->upidx) {
/* copy the allocated string */
state->uploadfile = u->infile;
u->infile = NULL;
@@ -1163,13 +1135,25 @@ static CURLcode single_transfer(struct OperationConfig *config,
return result;
}
+ if(state->upidx >= state->upnum) {
+ state->urlnum = 0;
+ tool_safefree(state->uploadfile);
+ glob_cleanup(&state->inglob);
+ state->upidx = 0;
+ state->urlnode = u->next; /* next node */
+ if(state->urlnode && !state->urlnode->url) {
+ /* This node has no URL. End of the road. */
+ warnf("Got more output options than URLs");
+ break;
+ }
+ continue;
+ }
+
if(!state->urlnum) {
if(!config->globoff && !u->noglob) {
/* Unless explicitly shut off, we expand '{...}' and '[...]'
expressions and return total number of URLs in pattern set */
- result = glob_url(&state->urlglob, u->url, &state->urlnum,
- (!global->silent || global->showerror) ?
- tool_stderr : NULL);
+ result = glob_url(&state->urlglob, u->url, &state->urlnum, err);
if(result)
return result;
}
@@ -1177,206 +1161,174 @@ static CURLcode single_transfer(struct OperationConfig *config,
state->urlnum = 1; /* without globbing, this is a single URL */
}
- if(state->up < state->infilenum) {
- struct per_transfer *per = NULL;
- struct OutStruct *outs;
- struct OutStruct *heads;
- struct OutStruct *etag_save;
- struct HdrCbData *hdrcbdata = NULL;
- struct OutStruct etag_first;
- CURL *curl;
-
- /* --etag-save */
- memset(&etag_first, 0, sizeof(etag_first));
- etag_save = &etag_first;
- etag_save->stream = stdout;
+ /* --etag-save */
+ memset(&etag_first, 0, sizeof(etag_first));
+ etag_first.stream = stdout;
- /* --etag-compare */
- if(config->etag_compare_file) {
- result = etag_compare(config);
- if(result)
- return result;
- }
+ /* --etag-compare */
+ if(config->etag_compare_file) {
+ result = etag_compare(config);
+ if(result)
+ return result;
+ }
- if(config->etag_save_file) {
- bool badetag = FALSE;
- result = etag_store(config, etag_save, &badetag);
- if(result || badetag)
- break;
- }
+ if(config->etag_save_file) {
+ bool badetag = FALSE;
+ result = etag_store(config, &etag_first, &badetag);
+ if(result || badetag)
+ break;
+ }
- curl = curl_easy_init();
- if(curl)
- result = add_per_transfer(&per);
- else
- result = CURLE_OUT_OF_MEMORY;
- if(result) {
+ curl = curl_easy_init();
+ if(curl)
+ result = add_per_transfer(&per);
+ else
+ result = CURLE_OUT_OF_MEMORY;
+ if(result) {
+ curl_easy_cleanup(curl);
+ if(etag_first.fopened)
+ fclose(etag_first.stream);
+ return result;
+ }
+ per->etag_save = etag_first; /* copy the whole struct */
+ if(state->uploadfile) {
+ per->uploadfile = strdup(state->uploadfile);
+ if(!per->uploadfile ||
+ SetHTTPrequest(TOOL_HTTPREQ_PUT, &config->httpreq)) {
+ tool_safefree(per->uploadfile);
curl_easy_cleanup(curl);
- if(etag_save->fopened)
- fclose(etag_save->stream);
- return result;
- }
- per->etag_save = etag_first; /* copy the whole struct */
- if(state->uploadfile) {
- per->uploadfile = strdup(state->uploadfile);
- if(!per->uploadfile ||
- SetHTTPrequest(TOOL_HTTPREQ_PUT, &config->httpreq)) {
- tool_safefree(per->uploadfile);
- curl_easy_cleanup(curl);
- return CURLE_FAILED_INIT;
- }
+ return CURLE_FAILED_INIT;
}
- per->config = config;
- per->curl = curl;
- per->urlnum = u->num;
-
- /* default headers output stream is stdout */
- heads = &per->heads;
- heads->stream = stdout;
+ }
+ per->config = config;
+ per->curl = curl;
+ per->urlnum = u->num;
- /* Single header file for all URLs */
- if(config->headerfile) {
- result = setup_headerfile(config, per, heads);
- if(result)
- return result;
- }
- hdrcbdata = &per->hdrcbdata;
+ /* default headers output stream is stdout */
+ heads = &per->heads;
+ heads->stream = stdout;
- outs = &per->outs;
+ /* Single header file for all URLs */
+ if(config->headerfile) {
+ result = setup_headerfile(config, per, heads);
+ if(result)
+ return result;
+ }
+ hdrcbdata = &per->hdrcbdata;
- per->outfile = NULL;
- per->infdopen = FALSE;
- per->infd = STDIN_FILENO;
+ outs = &per->outs;
- /* default output stream is stdout */
- outs->stream = stdout;
+ per->outfile = NULL;
+ per->infdopen = FALSE;
+ per->infd = STDIN_FILENO;
- if(glob_inuse(&state->urlglob)) {
- result = glob_next_url(&per->url, &state->urlglob);
- if(result)
- return result;
- }
- else if(!state->li) {
- per->url = strdup(u->url);
- if(!per->url)
- return CURLE_OUT_OF_MEMORY;
- }
- else {
- per->url = NULL;
- break;
- }
+ /* default output stream is stdout */
+ outs->stream = stdout;
- if(state->outfiles) {
- per->outfile = strdup(state->outfiles);
- if(!per->outfile)
- return CURLE_OUT_OF_MEMORY;
- }
+ if(glob_inuse(&state->urlglob))
+ result = glob_next_url(&per->url, &state->urlglob);
+ else if(!state->urlidx) {
+ per->url = strdup(u->url);
+ if(!per->url)
+ result = CURLE_OUT_OF_MEMORY;
+ }
+ else {
+ per->url = NULL;
+ break;
+ }
+ if(result)
+ return result;
- outs->out_null = u->out_null;
- if(!outs->out_null && (u->useremote ||
- (per->outfile && strcmp("-", per->outfile)))) {
- result = setup_outfile(config, per, outs, skipped);
- if(result)
- return result;
- }
+ if(u->outfile) {
+ per->outfile = strdup(u->outfile);
+ if(!per->outfile)
+ return CURLE_OUT_OF_MEMORY;
+ }
- if(per->uploadfile) {
-
- if(stdin_upload(per->uploadfile))
- check_stdin_upload(config, per);
- else {
- /*
- * We have specified a file to upload and it is not "-".
- */
- result = add_file_name_to_url(per->curl, &per->url,
- per->uploadfile);
- if(result)
- return result;
- }
- }
+ outs->out_null = u->out_null;
+ if(!outs->out_null &&
+ (u->useremote || (per->outfile && strcmp("-", per->outfile)))) {
+ result = setup_outfile(config, per, outs, skipped);
+ if(result)
+ return result;
+ }
- if(per->uploadfile && config->resume_from_current)
- config->resume_from = -1; /* -1 will then force get-it-yourself */
+ if(per->uploadfile) {
- if(output_expected(per->url, per->uploadfile) && outs->stream &&
- isatty(fileno(outs->stream)))
- /* we send the output to a tty, therefore we switch off the progress
- meter */
- per->noprogress = global->noprogress = global->isatty = TRUE;
+ if(stdin_upload(per->uploadfile))
+ check_stdin_upload(config, per);
else {
- /* progress meter is per download, so restore config
- values */
- per->noprogress = global->noprogress = orig_noprogress;
- global->isatty = orig_isatty;
- }
-
- if(httpgetfields || config->query) {
- result = append2query(config, per,
- httpgetfields ? httpgetfields : config->query);
+ /*
+ * We have specified a file to upload and it is not "-".
+ */
+ result = add_file_name_to_url(per->curl, &per->url,
+ per->uploadfile);
if(result)
return result;
}
- if((!per->outfile || !strcmp(per->outfile, "-")) &&
- !config->use_ascii) {
- /* We get the output to stdout and we have not got the ASCII/text
- flag, then set stdout to be binary */
- CURLX_SET_BINMODE(stdout);
- }
-
- /* explicitly passed to stdout means okaying binary gunk */
- config->terminal_binary_ok =
- (per->outfile && !strcmp(per->outfile, "-"));
-
- if(config->content_disposition && u->useremote)
- hdrcbdata->honor_cd_filename = TRUE;
- else
- hdrcbdata->honor_cd_filename = FALSE;
+ if(config->resume_from_current)
+ config->resume_from = -1; /* -1 will then force get-it-yourself */
+ }
- hdrcbdata->outs = outs;
- hdrcbdata->heads = heads;
- hdrcbdata->etag_save = etag_save;
- hdrcbdata->config = config;
+ if(output_expected(per->url, per->uploadfile) && outs->stream &&
+ isatty(fileno(outs->stream)))
+ /* we send the output to a tty, therefore we switch off the progress
+ meter */
+ per->noprogress = global->noprogress = global->isatty = TRUE;
+ else {
+ /* progress meter is per download, so restore config
+ values */
+ per->noprogress = global->noprogress = orig_noprogress;
+ global->isatty = orig_isatty;
+ }
- result = config2setopts(config, per, curl, share);
+ if(httpgetfields || config->query) {
+ result = append2query(config, per,
+ httpgetfields ? httpgetfields : config->query);
if(result)
return result;
+ }
- /* initialize retry vars for loop below */
- per->retry_sleep_default = config->retry_delay_ms ?
- config->retry_delay_ms : RETRY_SLEEP_DEFAULT; /* ms */
- per->retry_remaining = config->req_retry;
- per->retry_sleep = per->retry_sleep_default; /* ms */
- per->retrystart = curlx_now();
-
- state->li++;
- /* Here's looping around each globbed URL */
- if(state->li >= state->urlnum) {
- state->li = 0;
- state->urlnum = 0; /* forced reglob of URLs */
- glob_cleanup(&state->urlglob);
- state->up++;
- tool_safefree(state->uploadfile); /* clear it to get the next */
- }
- *added = TRUE;
- break;
+ if((!per->outfile || !strcmp(per->outfile, "-")) &&
+ !config->use_ascii) {
+ /* We get the output to stdout and we have not got the ASCII/text flag,
+ then set stdout to be binary */
+ CURLX_SET_BINMODE(stdout);
}
- /* Free this URL node data without destroying the
- node itself nor modifying next pointer. */
- u->outset = u->urlset = u->useremote =
- u->uploadset = u->noupload = u->noglob = FALSE;
- glob_cleanup(&state->urlglob);
- state->urlnum = 0;
+ /* explicitly passed to stdout means okaying binary gunk */
+ config->terminal_binary_ok =
+ (per->outfile && !strcmp(per->outfile, "-"));
+
+ hdrcbdata->honor_cd_filename =
+ (config->content_disposition && u->useremote);
+ hdrcbdata->outs = outs;
+ hdrcbdata->heads = heads;
+ hdrcbdata->etag_save = &etag_first;
+ hdrcbdata->config = config;
+
+ result = config2setopts(config, per, curl, share);
+ if(result)
+ return result;
+
+ /* initialize retry vars for loop below */
+ per->retry_sleep_default = config->retry_delay_ms;
+ per->retry_remaining = config->req_retry;
+ per->retry_sleep = per->retry_sleep_default; /* ms */
+ per->retrystart = curlx_now();
- state->outfiles = NULL;
- tool_safefree(state->uploadfile);
- /* Free list of globbed upload files */
- glob_cleanup(&state->inglob);
- state->up = 0;
- state->urlnode = u->next; /* next node */
+ state->urlidx++;
+ /* Here's looping around each globbed URL */
+ if(state->urlidx >= state->urlnum) {
+ state->urlidx = state->urlnum = 0;
+ glob_cleanup(&state->urlglob);
+ state->upidx++;
+ tool_safefree(state->uploadfile); /* clear it to get the next */
+ }
+ *added = TRUE;
+ break;
}
- state->outfiles = NULL;
return result;
}
@@ -1978,8 +1930,8 @@ static CURLcode serial_transfers(CURLSH *share)
/* returncode errors have priority */
result = returncode;
- if(result && global->current)
- single_transfer_cleanup(global->current);
+ if(result)
+ single_transfer_cleanup();
return result;
}
@@ -2114,7 +2066,7 @@ static CURLcode transfer_per_config(struct OperationConfig *config,
if(!result) {
result = single_transfer(config, share, added, skipped);
if(!*added || result)
- single_transfer_cleanup(config);
+ single_transfer_cleanup();
}
return result;
diff --git a/src/tool_operate.h b/src/tool_operate.h
index ec4204486..a323271b7 100644
--- a/src/tool_operate.h
+++ b/src/tool_operate.h
@@ -80,7 +80,7 @@ struct per_transfer {
};
CURLcode operate(int argc, argv_item_t argv[]);
-void single_transfer_cleanup(struct OperationConfig *config);
+void single_transfer_cleanup(void);
extern struct per_transfer *transfers; /* first node */
diff --git a/src/tool_operhlp.c b/src/tool_operhlp.c
index e25a3cf60..fdf647236 100644
--- a/src/tool_operhlp.c
+++ b/src/tool_operhlp.c
@@ -45,7 +45,7 @@ void clean_getout(struct OperationConfig *config)
node = next;
}
config->url_list = NULL;
- single_transfer_cleanup(config);
+ single_transfer_cleanup();
}
}
diff --git a/src/tool_parsecfg.c b/src/tool_parsecfg.c
index d5303e679..bc22b9d5b 100644
--- a/src/tool_parsecfg.c
+++ b/src/tool_parsecfg.c
@@ -36,7 +36,47 @@
specified with an initial dash! */
#define ISSEP(x,dash) (!dash && (((x) == '=') || ((x) == ':')))
-static const char *unslashquote(const char *line, char *param);
+/*
+ * Copies the string from line to the param dynbuf, unquoting backslash-quoted
+ * characters and null-terminating the output string. Stops at the first
+ * non-backslash-quoted double quote character or the end of the input string.
+ * Returns 0 on success, non-zero on out-of-memory.
+ */
+static int unslashquote(const char *line, struct dynbuf *param)
+{
+ curlx_dyn_reset(param);
+
+ while(*line && (*line != '\"')) {
+ if(*line == '\\') {
+ char out;
+ line++;
+
+ /* default is to output the letter after the backslash */
+ switch(out = *line) {
+ case '\0':
+ continue; /* this'll break out of the loop */
+ case 't':
+ out = '\t';
+ break;
+ case 'n':
+ out = '\n';
+ break;
+ case 'r':
+ out = '\r';
+ break;
+ case 'v':
+ out = '\v';
+ break;
+ }
+ if(curlx_dyn_addn(param, &out, 1))
+ return 1;
+ line++;
+ }
+ else if(curlx_dyn_addn(param, line++, 1))
+ return 1;
+ }
+ return 0; /* ok */
+}
#define MAX_CONFIG_LINE_LENGTH (10*1024*1024)
@@ -87,13 +127,14 @@ int parseconfig(const char *filename)
int lineno = 0;
bool dashed_option;
struct dynbuf buf;
+ struct dynbuf pbuf;
bool fileerror = FALSE;
curlx_dyn_init(&buf, MAX_CONFIG_LINE_LENGTH);
+ curlx_dyn_init(&pbuf, MAX_CONFIG_LINE_LENGTH);
DEBUGASSERT(filename);
while(!rc && my_get_line(file, &buf, &fileerror)) {
ParameterError res;
- bool alloced_param = FALSE;
lineno++;
line = curlx_dyn_ptr(&buf);
if(!line) {
@@ -125,15 +166,10 @@ int parseconfig(const char *filename)
/* the parameter starts here (unless quoted) */
if(*line == '\"') {
/* quoted parameter, do the quote dance */
- line++;
- param = malloc(strlen(line) + 1); /* parameter */
- if(!param) {
- /* out of memory */
- rc = 1;
+ rc = unslashquote(++line, &pbuf);
+ if(rc)
break;
- }
- alloced_param = TRUE;
- (void)unslashquote(line, param);
+ param = curlx_dyn_len(&pbuf) ? curlx_dyn_ptr(&pbuf) : CURL_UNCONST("");
}
else {
param = line; /* parameter starts here */
@@ -156,10 +192,9 @@ int parseconfig(const char *filename)
case '#': /* comment */
break;
default:
- warnf("%s:%d: warning: '%s' uses unquoted "
- "whitespace", filename, lineno, option);
- warnf("This may cause side-effects. "
- "Consider using double quotes?");
+ warnf("%s:%d: warning: '%s' uses unquoted whitespace. "
+ "This may cause side-effects. Consider double quotes.",
+ filename, lineno, option);
}
}
if(!*param)
@@ -211,11 +246,9 @@ int parseconfig(const char *filename)
rc = (int)res;
}
}
-
- if(alloced_param)
- tool_safefree(param);
}
curlx_dyn_free(&buf);
+ curlx_dyn_free(&pbuf);
if(file != stdin)
fclose(file);
if(fileerror)
@@ -228,46 +261,6 @@ int parseconfig(const char *filename)
return rc;
}
-/*
- * Copies the string from line to the buffer at param, unquoting
- * backslash-quoted characters and null-terminating the output string. Stops
- * at the first non-backslash-quoted double quote character or the end of the
- * input string. param must be at least as long as the input string. Returns
- * the pointer after the last handled input character.
- */
-static const char *unslashquote(const char *line, char *param)
-{
- while(*line && (*line != '\"')) {
- if(*line == '\\') {
- char out;
- line++;
-
- /* default is to output the letter after the backslash */
- switch(out = *line) {
- case '\0':
- continue; /* this'll break out of the loop */
- case 't':
- out = '\t';
- break;
- case 'n':
- out = '\n';
- break;
- case 'r':
- out = '\r';
- break;
- case 'v':
- out = '\v';
- break;
- }
- *param++ = out;
- line++;
- }
- else
- *param++ = *line++;
- }
- *param = '\0'; /* always null-terminate */
- return line;
-}
static bool get_line(FILE *input, struct dynbuf *buf, bool *error)
{
diff --git a/src/tool_writeout.c b/src/tool_writeout.c
index 246971274..0a610de79 100644
--- a/src/tool_writeout.c
+++ b/src/tool_writeout.c
@@ -603,7 +603,7 @@ static const char *outtime(const char *ptr, /* %time{ ... */
if(!result) {
/* !checksrc! disable BANNEDFUNC 1 */
utc = gmtime(&secs);
- if(curlx_dyn_len(&format) &&
+ if(curlx_dyn_len(&format) && utc &&
strftime(output, sizeof(output), curlx_dyn_ptr(&format), utc))
fputs(output, stream);
curlx_dyn_free(&format);
diff --git a/tests/data/test459 b/tests/data/test459
index e46d02973..198e67d2a 100644
--- a/tests/data/test459
+++ b/tests/data/test459
@@ -56,8 +56,8 @@ Content-Type: application/x-www-form-urlencoded
arg
</protocol>
<stderr mode="text">
-Warning: %LOGDIR/config:1: warning: 'data' uses unquoted whitespace
-Warning: This may cause side-effects. Consider using double quotes?
+Warning: %LOGDIR/config:1: warning: 'data' uses unquoted whitespace. This may
+Warning: cause side-effects. Consider double quotes.
</stderr>
</verify>
</testcase>