Diffstat (limited to 't/unit-tests/clar/test')
-rw-r--r--  t/unit-tests/clar/test/CMakeLists.txt | 34
-rw-r--r--  t/unit-tests/clar/test/clar_test.h | 16
-rw-r--r--  t/unit-tests/clar/test/expected/help | 12
-rw-r--r--  t/unit-tests/clar/test/expected/quiet | 44
-rw-r--r--  t/unit-tests/clar/test/expected/specific_test | 9
-rw-r--r--  t/unit-tests/clar/test/expected/stop_on_failure | 8
-rw-r--r--  t/unit-tests/clar/test/expected/suite_names | 2
-rw-r--r--  t/unit-tests/clar/test/expected/summary.xml | 41
-rw-r--r--  t/unit-tests/clar/test/expected/summary_with_filename | 49
-rw-r--r--  t/unit-tests/clar/test/expected/summary_without_filename | 49
-rw-r--r--  t/unit-tests/clar/test/expected/tap | 92
-rw-r--r--  t/unit-tests/clar/test/expected/without_arguments | 48
-rw-r--r--  t/unit-tests/clar/test/main.c | 41
-rw-r--r--  t/unit-tests/clar/test/main.c.sample | 27
-rw-r--r--  t/unit-tests/clar/test/selftest.c | 370
-rw-r--r--  t/unit-tests/clar/test/selftest.h | 3
-rw-r--r--  t/unit-tests/clar/test/suites/CMakeLists.txt | 53
-rw-r--r--  t/unit-tests/clar/test/suites/combined.c (renamed from t/unit-tests/clar/test/sample.c) | 37
-rw-r--r--  t/unit-tests/clar/test/suites/main.c | 27
-rw-r--r--  t/unit-tests/clar/test/suites/pointer.c | 13
-rw-r--r--  t/unit-tests/clar/test/suites/resources/test/file (renamed from t/unit-tests/clar/test/resources/test/file) | 0
21 files changed, 874 insertions(+), 101 deletions(-)
diff --git a/t/unit-tests/clar/test/CMakeLists.txt b/t/unit-tests/clar/test/CMakeLists.txt
index 7f2c1dc17a..f240166439 100644
--- a/t/unit-tests/clar/test/CMakeLists.txt
+++ b/t/unit-tests/clar/test/CMakeLists.txt
@@ -2,12 +2,12 @@ find_package(Python COMPONENTS Interpreter REQUIRED)
add_custom_command(OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/clar.suite"
COMMAND "${Python_EXECUTABLE}" "${CMAKE_SOURCE_DIR}/generate.py" --output "${CMAKE_CURRENT_BINARY_DIR}"
- DEPENDS main.c sample.c clar_test.h
+ DEPENDS main.c selftest.c
WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
)
-add_executable(clar_test)
-set_target_properties(clar_test PROPERTIES
+add_executable(selftest)
+set_target_properties(selftest PROPERTIES
C_STANDARD 90
C_STANDARD_REQUIRED ON
C_EXTENSIONS OFF
@@ -15,25 +15,35 @@ set_target_properties(clar_test PROPERTIES
# MSVC generates all kinds of warnings. We may want to fix these in the future
# and then unconditionally treat warnings as errors.
-if(NOT MSVC)
- set_target_properties(clar_test PROPERTIES
+if (NOT MSVC)
+ set_target_properties(selftest PROPERTIES
COMPILE_WARNING_AS_ERROR ON
)
endif()
-target_sources(clar_test PRIVATE
+target_sources(selftest PRIVATE
main.c
- sample.c
+ selftest.c
"${CMAKE_CURRENT_BINARY_DIR}/clar.suite"
)
-target_compile_definitions(clar_test PRIVATE
- CLAR_FIXTURE_PATH="${CMAKE_CURRENT_SOURCE_DIR}/resources/"
+target_compile_definitions(selftest PRIVATE
+ CLAR_FIXTURE_PATH="${CMAKE_CURRENT_SOURCE_DIR}/expected/"
)
-target_compile_options(clar_test PRIVATE
+target_compile_options(selftest PRIVATE
$<IF:$<CXX_COMPILER_ID:MSVC>,/W4,-Wall>
)
-target_include_directories(clar_test PRIVATE
+target_include_directories(selftest PRIVATE
"${CMAKE_SOURCE_DIR}"
"${CMAKE_CURRENT_BINARY_DIR}"
)
-target_link_libraries(clar_test clar)
+target_link_libraries(selftest clar)
+
+add_test(NAME build_selftest
+ COMMAND "${CMAKE_COMMAND}" --build "${CMAKE_BINARY_DIR}" --config "$<CONFIG>" --target selftest
+)
+set_tests_properties(build_selftest PROPERTIES FIXTURES_SETUP clar_test_fixture)
+
+add_subdirectory(suites)
+
+add_test(NAME selftest COMMAND "${CMAKE_CURRENT_BINARY_DIR}/selftest" $<TARGET_FILE_DIR:combined_suite>)
+set_tests_properties(selftest PROPERTIES FIXTURES_REQUIRED clar_test_fixture)
diff --git a/t/unit-tests/clar/test/clar_test.h b/t/unit-tests/clar/test/clar_test.h
deleted file mode 100644
index 0fcaa639aa..0000000000
--- a/t/unit-tests/clar/test/clar_test.h
+++ /dev/null
@@ -1,16 +0,0 @@
-/*
- * Copyright (c) Vicent Marti. All rights reserved.
- *
- * This file is part of clar, distributed under the ISC license.
- * For full terms see the included COPYING file.
- */
-#ifndef __CLAR_TEST__
-#define __CLAR_TEST__
-
-/* Import the standard clar helper functions */
-#include "clar.h"
-
-/* Your custom shared includes / defines here */
-extern int global_test_counter;
-
-#endif
diff --git a/t/unit-tests/clar/test/expected/help b/t/unit-tests/clar/test/expected/help
new file mode 100644
index 0000000000..9428def2d7
--- /dev/null
+++ b/t/unit-tests/clar/test/expected/help
@@ -0,0 +1,12 @@
+Usage: combined [options]
+
+Options:
+ -sname Run only the suite with `name` (can go to individual test name)
+ -iname Include the suite with `name`
+ -xname Exclude the suite with `name`
+ -v Increase verbosity (show suite names)
+ -q Decrease verbosity, inverse to -v
+ -Q Quit as soon as a test fails
+ -t Display results in tap format
+ -l Print suite names
+ -r[filename] Write summary file (to the optional filename)
diff --git a/t/unit-tests/clar/test/expected/quiet b/t/unit-tests/clar/test/expected/quiet
new file mode 100644
index 0000000000..280c99d8ad
--- /dev/null
+++ b/t/unit-tests/clar/test/expected/quiet
@@ -0,0 +1,44 @@
+ 1) Failure:
+combined::1 [file:42]
+ Function call failed: -1
+
+ 2) Failure:
+combined::2 [file:42]
+ Expression is not true: 100 == 101
+
+ 3) Failure:
+combined::strings [file:42]
+ String mismatch: "mismatched" != actual ("this one fails")
+ 'mismatched' != 'expected' (at byte 0)
+
+ 4) Failure:
+combined::strings_with_length [file:42]
+ String mismatch: "exactly" != actual ("this one fails")
+ 'exa' != 'exp' (at byte 2)
+
+ 5) Failure:
+combined::int [file:42]
+ 101 != value ("extra note on failing test")
+ 101 != 100
+
+ 6) Failure:
+combined::int_fmt [file:42]
+ 022 != value
+ 0022 != 0144
+
+ 7) Failure:
+combined::bool [file:42]
+ 0 != value
+ 0 != 1
+
+ 8) Failure:
+combined::multiline_description [file:42]
+ Function call failed: -1
+ description line 1
+ description line 2
+
+ 9) Failure:
+combined::null_string [file:42]
+ String mismatch: "expected" != actual ("this one fails")
+ 'expected' != NULL
+
diff --git a/t/unit-tests/clar/test/expected/specific_test b/t/unit-tests/clar/test/expected/specific_test
new file mode 100644
index 0000000000..6c22e9f507
--- /dev/null
+++ b/t/unit-tests/clar/test/expected/specific_test
@@ -0,0 +1,9 @@
+Loaded 1 suites:
+Started (test status codes: OK='.' FAILURE='F' SKIPPED='S')
+F
+
+ 1) Failure:
+combined::bool [file:42]
+ 0 != value
+ 0 != 1
+
diff --git a/t/unit-tests/clar/test/expected/stop_on_failure b/t/unit-tests/clar/test/expected/stop_on_failure
new file mode 100644
index 0000000000..c23610754f
--- /dev/null
+++ b/t/unit-tests/clar/test/expected/stop_on_failure
@@ -0,0 +1,8 @@
+Loaded 1 suites:
+Started (test status codes: OK='.' FAILURE='F' SKIPPED='S')
+F
+
+ 1) Failure:
+combined::1 [file:42]
+ Function call failed: -1
+
diff --git a/t/unit-tests/clar/test/expected/suite_names b/t/unit-tests/clar/test/expected/suite_names
new file mode 100644
index 0000000000..10d1538427
--- /dev/null
+++ b/t/unit-tests/clar/test/expected/suite_names
@@ -0,0 +1,2 @@
+Test suites (use -s<name> to run just one):
+ 0: combined
diff --git a/t/unit-tests/clar/test/expected/summary.xml b/t/unit-tests/clar/test/expected/summary.xml
new file mode 100644
index 0000000000..9a89d43a59
--- /dev/null
+++ b/t/unit-tests/clar/test/expected/summary.xml
@@ -0,0 +1,41 @@
+<testsuites>
+ <testsuite id="0" name="selftest" hostname="localhost" timestamp="2024-09-06T10:04:08" tests="8" failures="8" errors="0">
+ <testcase name="1" classname="selftest" time="0.00">
+ <failure type="assert"><![CDATA[Function call failed: -1
+(null)]]></failure>
+ </testcase>
+ <testcase name="2" classname="selftest" time="0.00">
+ <failure type="assert"><![CDATA[Expression is not true: 100 == 101
+(null)]]></failure>
+ </testcase>
+ <testcase name="strings" classname="selftest" time="0.00">
+ <failure type="assert"><![CDATA[String mismatch: "mismatched" != actual ("this one fails")
+'mismatched' != 'expected' (at byte 0)]]></failure>
+ </testcase>
+ <testcase name="strings_with_length" classname="selftest" time="0.00">
+ <failure type="assert"><![CDATA[String mismatch: "exactly" != actual ("this one fails")
+'exa' != 'exp' (at byte 2)]]></failure>
+ </testcase>
+ <testcase name="int" classname="selftest" time="0.00">
+ <failure type="assert"><![CDATA[101 != value ("extra note on failing test")
+101 != 100]]></failure>
+ </testcase>
+ <testcase name="int_fmt" classname="selftest" time="0.00">
+ <failure type="assert"><![CDATA[022 != value
+0022 != 0144]]></failure>
+ </testcase>
+ <testcase name="bool" classname="selftest" time="0.00">
+ <failure type="assert"><![CDATA[0 != value
+0 != 1]]></failure>
+ </testcase>
+ <testcase name="multiline_description" classname="selftest" time="0.00">
+ <failure type="assert"><![CDATA[Function call failed: -1
+description line 1
+description line 2]]></failure>
+ </testcase>
+ <testcase name="null_string" classname="selftest" time="0.00">
+ <failure type="assert"><![CDATA[String mismatch: "expected" != actual ("this one fails")
+'expected' != NULL]]></failure>
+ </testcase>
+ </testsuite>
+</testsuites>
diff --git a/t/unit-tests/clar/test/expected/summary_with_filename b/t/unit-tests/clar/test/expected/summary_with_filename
new file mode 100644
index 0000000000..460160791d
--- /dev/null
+++ b/t/unit-tests/clar/test/expected/summary_with_filename
@@ -0,0 +1,49 @@
+Loaded 1 suites:
+Started (test status codes: OK='.' FAILURE='F' SKIPPED='S')
+FFFFFFFFF
+
+ 1) Failure:
+combined::1 [file:42]
+ Function call failed: -1
+
+ 2) Failure:
+combined::2 [file:42]
+ Expression is not true: 100 == 101
+
+ 3) Failure:
+combined::strings [file:42]
+ String mismatch: "mismatched" != actual ("this one fails")
+ 'mismatched' != 'expected' (at byte 0)
+
+ 4) Failure:
+combined::strings_with_length [file:42]
+ String mismatch: "exactly" != actual ("this one fails")
+ 'exa' != 'exp' (at byte 2)
+
+ 5) Failure:
+combined::int [file:42]
+ 101 != value ("extra note on failing test")
+ 101 != 100
+
+ 6) Failure:
+combined::int_fmt [file:42]
+ 022 != value
+ 0022 != 0144
+
+ 7) Failure:
+combined::bool [file:42]
+ 0 != value
+ 0 != 1
+
+ 8) Failure:
+combined::multiline_description [file:42]
+ Function call failed: -1
+ description line 1
+ description line 2
+
+ 9) Failure:
+combined::null_string [file:42]
+ String mismatch: "expected" != actual ("this one fails")
+ 'expected' != NULL
+
+written summary file to different.xml
diff --git a/t/unit-tests/clar/test/expected/summary_without_filename b/t/unit-tests/clar/test/expected/summary_without_filename
new file mode 100644
index 0000000000..7874c1d98b
--- /dev/null
+++ b/t/unit-tests/clar/test/expected/summary_without_filename
@@ -0,0 +1,49 @@
+Loaded 1 suites:
+Started (test status codes: OK='.' FAILURE='F' SKIPPED='S')
+FFFFFFFFF
+
+ 1) Failure:
+combined::1 [file:42]
+ Function call failed: -1
+
+ 2) Failure:
+combined::2 [file:42]
+ Expression is not true: 100 == 101
+
+ 3) Failure:
+combined::strings [file:42]
+ String mismatch: "mismatched" != actual ("this one fails")
+ 'mismatched' != 'expected' (at byte 0)
+
+ 4) Failure:
+combined::strings_with_length [file:42]
+ String mismatch: "exactly" != actual ("this one fails")
+ 'exa' != 'exp' (at byte 2)
+
+ 5) Failure:
+combined::int [file:42]
+ 101 != value ("extra note on failing test")
+ 101 != 100
+
+ 6) Failure:
+combined::int_fmt [file:42]
+ 022 != value
+ 0022 != 0144
+
+ 7) Failure:
+combined::bool [file:42]
+ 0 != value
+ 0 != 1
+
+ 8) Failure:
+combined::multiline_description [file:42]
+ Function call failed: -1
+ description line 1
+ description line 2
+
+ 9) Failure:
+combined::null_string [file:42]
+ String mismatch: "expected" != actual ("this one fails")
+ 'expected' != NULL
+
+written summary file to summary.xml
diff --git a/t/unit-tests/clar/test/expected/tap b/t/unit-tests/clar/test/expected/tap
new file mode 100644
index 0000000000..bddbd5dfe9
--- /dev/null
+++ b/t/unit-tests/clar/test/expected/tap
@@ -0,0 +1,92 @@
+TAP version 13
+# start of suite 1: combined
+not ok 1 - combined::1
+ ---
+ reason: |
+ Function call failed: -1
+ at:
+ file: 'file'
+ line: 42
+ function: 'func'
+ ---
+not ok 2 - combined::2
+ ---
+ reason: |
+ Expression is not true: 100 == 101
+ at:
+ file: 'file'
+ line: 42
+ function: 'func'
+ ---
+not ok 3 - combined::strings
+ ---
+ reason: |
+ String mismatch: "mismatched" != actual ("this one fails")
+ 'mismatched' != 'expected' (at byte 0)
+ at:
+ file: 'file'
+ line: 42
+ function: 'func'
+ ---
+not ok 4 - combined::strings_with_length
+ ---
+ reason: |
+ String mismatch: "exactly" != actual ("this one fails")
+ 'exa' != 'exp' (at byte 2)
+ at:
+ file: 'file'
+ line: 42
+ function: 'func'
+ ---
+not ok 5 - combined::int
+ ---
+ reason: |
+ 101 != value ("extra note on failing test")
+ 101 != 100
+ at:
+ file: 'file'
+ line: 42
+ function: 'func'
+ ---
+not ok 6 - combined::int_fmt
+ ---
+ reason: |
+ 022 != value
+ 0022 != 0144
+ at:
+ file: 'file'
+ line: 42
+ function: 'func'
+ ---
+not ok 7 - combined::bool
+ ---
+ reason: |
+ 0 != value
+ 0 != 1
+ at:
+ file: 'file'
+ line: 42
+ function: 'func'
+ ---
+not ok 8 - combined::multiline_description
+ ---
+ reason: |
+ Function call failed: -1
+ description line 1
+ description line 2
+ at:
+ file: 'file'
+ line: 42
+ function: 'func'
+ ---
+not ok 9 - combined::null_string
+ ---
+ reason: |
+ String mismatch: "expected" != actual ("this one fails")
+ 'expected' != NULL
+ at:
+ file: 'file'
+ line: 42
+ function: 'func'
+ ---
+1..9
diff --git a/t/unit-tests/clar/test/expected/without_arguments b/t/unit-tests/clar/test/expected/without_arguments
new file mode 100644
index 0000000000..1111d418a0
--- /dev/null
+++ b/t/unit-tests/clar/test/expected/without_arguments
@@ -0,0 +1,48 @@
+Loaded 1 suites:
+Started (test status codes: OK='.' FAILURE='F' SKIPPED='S')
+FFFFFFFFF
+
+ 1) Failure:
+combined::1 [file:42]
+ Function call failed: -1
+
+ 2) Failure:
+combined::2 [file:42]
+ Expression is not true: 100 == 101
+
+ 3) Failure:
+combined::strings [file:42]
+ String mismatch: "mismatched" != actual ("this one fails")
+ 'mismatched' != 'expected' (at byte 0)
+
+ 4) Failure:
+combined::strings_with_length [file:42]
+ String mismatch: "exactly" != actual ("this one fails")
+ 'exa' != 'exp' (at byte 2)
+
+ 5) Failure:
+combined::int [file:42]
+ 101 != value ("extra note on failing test")
+ 101 != 100
+
+ 6) Failure:
+combined::int_fmt [file:42]
+ 022 != value
+ 0022 != 0144
+
+ 7) Failure:
+combined::bool [file:42]
+ 0 != value
+ 0 != 1
+
+ 8) Failure:
+combined::multiline_description [file:42]
+ Function call failed: -1
+ description line 1
+ description line 2
+
+ 9) Failure:
+combined::null_string [file:42]
+ String mismatch: "expected" != actual ("this one fails")
+ 'expected' != NULL
+
diff --git a/t/unit-tests/clar/test/main.c b/t/unit-tests/clar/test/main.c
index 59e56ad255..94af440643 100644
--- a/t/unit-tests/clar/test/main.c
+++ b/t/unit-tests/clar/test/main.c
@@ -1,23 +1,9 @@
-/*
- * Copyright (c) Vicent Marti. All rights reserved.
- *
- * This file is part of clar, distributed under the ISC license.
- * For full terms see the included COPYING file.
- */
+#include <stdio.h>
+#include <string.h>
-#include "clar_test.h"
+#include "selftest.h"
-/*
- * Sample main() for clar tests.
- *
- * You should write your own main routine for clar tests that does specific
- * setup and teardown as necessary for your application. The only required
- * line is the call to `clar_test(argc, argv)`, which will execute the test
- * suite. If you want to check the return value of the test application,
- * your main() should return the same value returned by clar_test().
- */
-
-int global_test_counter = 0;
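+/* Directory holding the per-suite test binaries built under suites/; set from argv[1] in main(). */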
+const char *selftest_suite_directory;
#ifdef _WIN32
int __cdecl main(int argc, char *argv[])
@@ -25,16 +11,15 @@ int __cdecl main(int argc, char *argv[])
int main(int argc, char *argv[])
#endif
{
- int ret;
-
- /* Your custom initialization here */
- global_test_counter = 0;
-
- /* Run the test suite */
- ret = clar_test(argc, argv);
+ if (argc < 2) {
+ fprintf(stderr, "usage: %s <selftest-suite-directory> <options>\n",
+ argv[0]);
+ exit(1);
+ }
- /* Your custom cleanup here */
- cl_assert_equal_i(8, global_test_counter);
+ selftest_suite_directory = argv[1];
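+ /* Shift the remaining arguments (and trailing NULL) down so clar only parses its own options. */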
+ memmove(argv + 1, argv + 2, (argc - 1) * sizeof(*argv));
+ argc -= 1;
- return ret;
+ return clar_test(argc, argv);
}
diff --git a/t/unit-tests/clar/test/main.c.sample b/t/unit-tests/clar/test/main.c.sample
deleted file mode 100644
index a4d91b72fa..0000000000
--- a/t/unit-tests/clar/test/main.c.sample
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Copyright (c) Vicent Marti. All rights reserved.
- *
- * This file is part of clar, distributed under the ISC license.
- * For full terms see the included COPYING file.
- */
-
-#include "clar_test.h"
-
-/*
- * Minimal main() for clar tests.
- *
- * Modify this with any application specific setup or teardown that you need.
- * The only required line is the call to `clar_test(argc, argv)`, which will
- * execute the test suite. If you want to check the return value of the test
- * application, main() should return the same value returned by clar_test().
- */
-
-#ifdef _WIN32
-int __cdecl main(int argc, char *argv[])
-#else
-int main(int argc, char *argv[])
-#endif
-{
- /* Run the test suite */
- return clar_test(argc, argv);
-}
diff --git a/t/unit-tests/clar/test/selftest.c b/t/unit-tests/clar/test/selftest.c
new file mode 100644
index 0000000000..eed83e4512
--- /dev/null
+++ b/t/unit-tests/clar/test/selftest.c
@@ -0,0 +1,370 @@
+#include <stdarg.h>
+#include <stdio.h>
+#include <string.h>
+#include <sys/stat.h>
+
+#include "selftest.h"
+
+#ifdef _WIN32
+# define WIN32_LEAN_AND_MEAN
+# include <windows.h>
+
+static char *read_full(HANDLE h, int is_pipe)
+{
+ char *data = NULL;
+ size_t data_size = 0;
+
+ while (1) {
+ CHAR buf[4096];
+ DWORD bytes_read;
+
+ if (!ReadFile(h, buf, sizeof(buf), &bytes_read, NULL)) {
+ if (!is_pipe)
+ cl_fail("Failed reading file handle.");
+ cl_assert_equal_i(GetLastError(), ERROR_BROKEN_PIPE);
+ break;
+ }
+ if (!bytes_read)
+ break;
+
+ data = realloc(data, data_size + bytes_read);
+ cl_assert(data);
+ memcpy(data + data_size, buf, bytes_read);
+ data_size += bytes_read;
+ }
+
+ data = realloc(data, data_size + 1);
+ cl_assert(data);
+ data[data_size] = '\0';
+
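+ /* Strip the CR from CRLF line endings so the output matches the LF-only expected files. */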
+ while (strstr(data, "\r\n")) {
+ char *ptr = strstr(data, "\r\n");
+ memmove(ptr, ptr + 1, strlen(ptr));
+ }
+
+ return data;
+}
+
+static char *read_file(const char *path)
+{
+ char *content;
+ HANDLE file;
+
+ file = CreateFile(path, GENERIC_READ, FILE_SHARE_READ, NULL,
+ OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, NULL);
+ cl_assert(file != INVALID_HANDLE_VALUE);
+ content = read_full(file, 0);
+ cl_assert_equal_b(1, CloseHandle(file));
+
+ return content;
+}
+
+static char *execute(const char *suite, int expected_error_code, const char **args, size_t nargs)
+{
+ SECURITY_ATTRIBUTES security_attributes = { 0 };
+ PROCESS_INFORMATION process_info = { 0 };
+ STARTUPINFO startup_info = { 0 };
+ char binary_path[4096] = { 0 };
+ char cmdline[4096] = { 0 };
+ char *output = NULL;
+ HANDLE stdout_write;
+ HANDLE stdout_read;
+ DWORD exit_code;
+ size_t i;
+
+ snprintf(binary_path, sizeof(binary_path), "%s/%s_suite.exe",
+ selftest_suite_directory, suite);
+
+ /*
+ * Assemble command line arguments. In theory we'd have to properly
+ * quote them. In practice none of our tests actually care.
+ */
+ snprintf(cmdline, sizeof(cmdline), suite);
+ for (i = 0; i < nargs; i++) {
+ size_t cmdline_len = strlen(cmdline);
+ const char *arg = args[i];
+ cl_assert(cmdline_len + strlen(arg) < sizeof(cmdline));
+ snprintf(cmdline + cmdline_len, sizeof(cmdline) - cmdline_len,
+ " %s", arg);
+ }
+
+ /*
+ * Create a pipe that we will use to read data from the child process.
+ * The writing side needs to be inheritable such that the child can use
+ * it as stdout and stderr. The reading side should only be used by the
+ * parent.
+ */
+ security_attributes.nLength = sizeof(security_attributes);
+ security_attributes.bInheritHandle = TRUE;
+ cl_assert_equal_b(1, CreatePipe(&stdout_read, &stdout_write, &security_attributes, 0));
+ cl_assert_equal_b(1, SetHandleInformation(stdout_read, HANDLE_FLAG_INHERIT, 0));
+
+ /*
+ * Create the child process with our pipe.
+ */
+ startup_info.cb = sizeof(startup_info);
+ startup_info.hStdError = stdout_write;
+ startup_info.hStdOutput = stdout_write;
+ startup_info.dwFlags |= STARTF_USESTDHANDLES;
+ cl_assert_equal_b(1, CreateProcess(binary_path, cmdline, NULL, NULL, TRUE,
+ 0, NULL, NULL, &startup_info, &process_info));
+ cl_assert_equal_b(1, CloseHandle(stdout_write));
+
+ output = read_full(stdout_read, 1);
+ cl_assert_equal_b(1, CloseHandle(stdout_read));
+ cl_assert_equal_b(1, GetExitCodeProcess(process_info.hProcess, &exit_code));
+ cl_assert_equal_i(exit_code, expected_error_code);
+
+ return output;
+}
+
+static void assert_output(const char *suite, const char *expected_output_file, int expected_error_code, ...)
+{
+ char *expected_output = NULL;
+ char *output = NULL;
+ const char *args[16];
+ va_list ap;
+ size_t i;
+
+ va_start(ap, expected_error_code);
+ for (i = 0; ; i++) {
+ const char *arg = va_arg(ap, const char *);
+ if (!arg)
+ break;
+ cl_assert(i < sizeof(args) / sizeof(*args));
+ args[i] = arg;
+ }
+ va_end(ap);
+
+ output = execute(suite, expected_error_code, args, i);
+ expected_output = read_file(cl_fixture(expected_output_file));
+ cl_assert_equal_s(output, expected_output);
+
+ free(expected_output);
+ free(output);
+}
+
+#else
+# include <errno.h>
+# include <fcntl.h>
+# include <limits.h>
+# include <unistd.h>
+# include <sys/wait.h>
+
+static char *read_full(int fd)
+{
+ size_t data_bytes = 0;
+ char *data = NULL;
+
+ while (1) {
+ char buf[4096];
+ ssize_t n;
+
+ n = read(fd, buf, sizeof(buf));
+ if (n < 0) {
+ if (errno == EAGAIN || errno == EINTR)
+ continue;
+ cl_fail("Failed reading from child process.");
+ }
+ if (!n)
+ break;
+
+ data = realloc(data, data_bytes + n);
+ cl_assert(data);
+
+ memcpy(data + data_bytes, buf, n);
+ data_bytes += n;
+ }
+
+ data = realloc(data, data_bytes + 1);
+ cl_assert(data);
+ data[data_bytes] = '\0';
+
+ return data;
+}
+
+static char *read_file(const char *path)
+{
+ char *data;
+ int fd;
+
+ fd = open(path, O_RDONLY);
+ if (fd < 0)
+ cl_fail("Failed reading expected file.");
+
+ data = read_full(fd);
+ cl_must_pass(close(fd));
+
+ return data;
+}
+
+static char *execute(const char *suite, int expected_error_code, const char **args, size_t nargs)
+{
+ int pipe_fds[2];
+ pid_t pid;
+
+ cl_must_pass(pipe(pipe_fds));
+
+ pid = fork();
+ if (!pid) {
+ const char *final_args[17] = { NULL };
+ char binary_path[4096];
+ size_t len = 0;
+ size_t i;
+
+ cl_assert(nargs < sizeof(final_args) / sizeof(*final_args));
+ final_args[0] = suite;
+ for (i = 0; i < nargs; i++)
+ final_args[i + 1] = args[i];
+
+ if (dup2(pipe_fds[1], STDOUT_FILENO) < 0 ||
+ dup2(pipe_fds[1], STDERR_FILENO) < 0 ||
+ close(0) < 0 ||
+ close(pipe_fds[0]) < 0 ||
+ close(pipe_fds[1]) < 0)
+ exit(1);
+
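+ /* Assemble the suite binary path "<suite-directory>/<suite>_suite" piece by piece. */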
+ cl_assert(len + strlen(selftest_suite_directory) < sizeof(binary_path));
+ strcpy(binary_path, selftest_suite_directory);
+ len += strlen(selftest_suite_directory);
+
+ cl_assert(len + 1 < sizeof(binary_path));
+ binary_path[len] = '/';
+ len += 1;
+
+ cl_assert(len + strlen(suite) < sizeof(binary_path));
+ strcpy(binary_path + len, suite);
+ len += strlen(suite);
+
+ cl_assert(len + strlen("_suite") < sizeof(binary_path));
+ strcpy(binary_path + len, "_suite");
+ len += strlen("_suite");
+
+ binary_path[len] = '\0';
+
+ execv(binary_path, (char **) final_args);
+ exit(1);
+ } else if (pid > 0) {
+ pid_t waited_pid;
+ char *output;
+ int stat;
+
+ cl_must_pass(close(pipe_fds[1]));
+
+ output = read_full(pipe_fds[0]);
+
+ waited_pid = waitpid(pid, &stat, 0);
+ cl_assert_equal_i(pid, waited_pid);
+ cl_assert(WIFEXITED(stat));
+ cl_assert_equal_i(WEXITSTATUS(stat), expected_error_code);
+
+ return output;
+ } else {
+ cl_fail("Fork failed.");
+ }
+
+ return NULL;
+}
+
+static void assert_output(const char *suite, const char *expected_output_file, int expected_error_code, ...)
+{
+ char *expected_output, *output;
+ const char *args[16];
+ va_list ap;
+ size_t i;
+
+ va_start(ap, expected_error_code);
+ for (i = 0; ; i++) {
+ cl_assert(i < sizeof(args) / sizeof(*args));
+ args[i] = va_arg(ap, const char *);
+ if (!args[i])
+ break;
+ }
+ va_end(ap);
+
+ output = execute(suite, expected_error_code, args, i);
+ expected_output = read_file(cl_fixture(expected_output_file));
+ cl_assert_equal_s(output, expected_output);
+
+ free(expected_output);
+ free(output);
+}
+#endif
+
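+/*
+ * Each test below executes one of the suite binaries built under suites/ and,
+ * in most cases, compares its combined stdout/stderr against a fixture file
+ * from the expected/ directory.
+ */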
+void test_selftest__help(void)
+{
+ cl_invoke(assert_output("combined", "help", 1, "-h", NULL));
+}
+
+void test_selftest__without_arguments(void)
+{
+ cl_invoke(assert_output("combined", "without_arguments", 9, NULL));
+}
+
+void test_selftest__specific_test(void)
+{
+ cl_invoke(assert_output("combined", "specific_test", 1, "-scombined::bool", NULL));
+}
+
+void test_selftest__stop_on_failure(void)
+{
+ cl_invoke(assert_output("combined", "stop_on_failure", 1, "-Q", NULL));
+}
+
+void test_selftest__quiet(void)
+{
+ cl_invoke(assert_output("combined", "quiet", 9, "-q", NULL));
+}
+
+void test_selftest__tap(void)
+{
+ cl_invoke(assert_output("combined", "tap", 9, "-t", NULL));
+}
+
+void test_selftest__suite_names(void)
+{
+ cl_invoke(assert_output("combined", "suite_names", 0, "-l", NULL));
+}
+
+void test_selftest__summary_without_filename(void)
+{
+ struct stat st;
+ cl_invoke(assert_output("combined", "summary_without_filename", 9, "-r", NULL));
+ /* The summary contains timestamps, so we cannot verify its contents. */
+ cl_must_pass(stat("summary.xml", &st));
+}
+
+void test_selftest__summary_with_filename(void)
+{
+ struct stat st;
+ cl_invoke(assert_output("combined", "summary_with_filename", 9, "-rdifferent.xml", NULL));
+ /* The summary contains timestamps, so we cannot verify its contents. */
+ cl_must_pass(stat("different.xml", &st));
+}
+
+void test_selftest__pointer_equal(void)
+{
+ const char *args[] = {
+ "-spointer::equal",
+ "-t"
+ };
+ char *output = execute("pointer", 0, args, 2);
+ cl_assert_equal_s(output,
+ "TAP version 13\n"
+ "# start of suite 1: pointer\n"
+ "ok 1 - pointer::equal\n"
+ "1..1\n"
+ );
+ free(output);
+}
+
+void test_selftest__pointer_unequal(void)
+{
+ const char *args[] = {
+ "-spointer::unequal",
+ };
+ char *output = execute("pointer", 1, args, 1);
+ cl_assert(output);
+ cl_assert(strstr(output, "Pointer mismatch: "));
+ free(output);
+}
diff --git a/t/unit-tests/clar/test/selftest.h b/t/unit-tests/clar/test/selftest.h
new file mode 100644
index 0000000000..c24e0c5af4
--- /dev/null
+++ b/t/unit-tests/clar/test/selftest.h
@@ -0,0 +1,3 @@
+#include "clar.h"
+
+extern const char *selftest_suite_directory;
diff --git a/t/unit-tests/clar/test/suites/CMakeLists.txt b/t/unit-tests/clar/test/suites/CMakeLists.txt
new file mode 100644
index 0000000000..fa8ab9416a
--- /dev/null
+++ b/t/unit-tests/clar/test/suites/CMakeLists.txt
@@ -0,0 +1,53 @@
+list(APPEND suites
+ "combined"
+ "pointer"
+)
+
+foreach(suite IN LISTS suites)
+ add_custom_command(OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/${suite}/clar.suite"
+ COMMAND "${Python_EXECUTABLE}"
+ "${CMAKE_SOURCE_DIR}/generate.py"
+ "${CMAKE_CURRENT_SOURCE_DIR}/${suite}.c"
+ --output "${CMAKE_CURRENT_BINARY_DIR}/${suite}"
+ DEPENDS ${suite}.c
+ WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
+ )
+
+ add_executable(${suite}_suite)
+ set_target_properties(${suite}_suite PROPERTIES
+ C_STANDARD 90
+ C_STANDARD_REQUIRED ON
+ C_EXTENSIONS OFF
+ )
+
+ # MSVC generates all kinds of warnings. We may want to fix these in the future
+ # and then unconditionally treat warnings as errors.
+ if(NOT MSVC)
+ set_target_properties(${suite}_suite PROPERTIES
+ COMPILE_WARNING_AS_ERROR ON
+ )
+ endif()
+
+ target_sources(${suite}_suite PRIVATE
+ main.c
+ ${suite}.c
+ "${CMAKE_CURRENT_BINARY_DIR}/${suite}/clar.suite"
+ )
+ target_compile_definitions(${suite}_suite PRIVATE
+ CLAR_FIXTURE_PATH="${CMAKE_CURRENT_SOURCE_DIR}/resources/"
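+ # CLAR_SELFTEST makes clar report fixed file/line/function values in failure
+ # output (the expected/ files show "[file:42]" and "func"), keeping the
+ # checked-in expected output stable.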
+ CLAR_SELFTEST
+ )
+ target_compile_options(${suite}_suite PRIVATE
+ $<IF:$<CXX_COMPILER_ID:MSVC>,/W4,-Wall>
+ )
+ target_include_directories(${suite}_suite PRIVATE
+ "${CMAKE_SOURCE_DIR}"
+ "${CMAKE_CURRENT_BINARY_DIR}/${suite}"
+ )
+ target_link_libraries(${suite}_suite clar)
+
+ add_test(NAME build_${suite}_suite
+ COMMAND "${CMAKE_COMMAND}" --build "${CMAKE_BINARY_DIR}" --config "$<CONFIG>" --target ${suite}_suite
+ )
+ set_tests_properties(build_${suite}_suite PROPERTIES FIXTURES_SETUP clar_test_fixture)
+endforeach()
diff --git a/t/unit-tests/clar/test/sample.c b/t/unit-tests/clar/test/suites/combined.c
index faa1209262..e8b41c98c3 100644
--- a/t/unit-tests/clar/test/sample.c
+++ b/t/unit-tests/clar/test/suites/combined.c
@@ -1,6 +1,7 @@
-#include "clar_test.h"
#include <sys/stat.h>
+#include "clar.h"
+
static int file_size(const char *filename)
{
struct stat st;
@@ -10,19 +11,14 @@ static int file_size(const char *filename)
return -1;
}
-void test_sample__initialize(void)
-{
- global_test_counter++;
-}
-
-void test_sample__cleanup(void)
+void test_combined__cleanup(void)
{
cl_fixture_cleanup("test");
cl_assert(file_size("test/file") == -1);
}
-void test_sample__1(void)
+void test_combined__1(void)
{
cl_assert(1);
cl_must_pass(0); /* 0 == success */
@@ -30,7 +26,7 @@ void test_sample__1(void)
cl_must_pass(-1); /* demonstrate a failing call */
}
-void test_sample__2(void)
+void test_combined__2(void)
{
cl_fixture_sandbox("test");
@@ -39,7 +35,7 @@ void test_sample__2(void)
cl_assert(100 == 101);
}
-void test_sample__strings(void)
+void test_combined__strings(void)
{
const char *actual = "expected";
cl_assert_equal_s("expected", actual);
@@ -47,7 +43,7 @@ void test_sample__strings(void)
cl_assert_equal_s_("mismatched", actual, "this one fails");
}
-void test_sample__strings_with_length(void)
+void test_combined__strings_with_length(void)
{
const char *actual = "expected";
cl_assert_equal_strn("expected_", actual, 8);
@@ -56,29 +52,34 @@ void test_sample__strings_with_length(void)
cl_assert_equal_strn_("exactly", actual, 3, "this one fails");
}
-void test_sample__int(void)
+void test_combined__int(void)
{
int value = 100;
cl_assert_equal_i(100, value);
cl_assert_equal_i_(101, value, "extra note on failing test");
}
-void test_sample__int_fmt(void)
+void test_combined__int_fmt(void)
{
int value = 100;
cl_assert_equal_i_fmt(022, value, "%04o");
}
-void test_sample__bool(void)
+void test_combined__bool(void)
{
int value = 100;
cl_assert_equal_b(1, value); /* test equality as booleans */
cl_assert_equal_b(0, value);
}
-void test_sample__ptr(void)
+void test_combined__multiline_description(void)
{
- const char *actual = "expected";
- cl_assert_equal_p(actual, actual); /* pointers to same object */
- cl_assert_equal_p(&actual, actual);
+ cl_must_pass_(-1, "description line 1\ndescription line 2");
+}
+
+void test_combined__null_string(void)
+{
+ const char *actual = NULL;
+ cl_assert_equal_s(actual, actual);
+ cl_assert_equal_s_("expected", actual, "this one fails");
}
diff --git a/t/unit-tests/clar/test/suites/main.c b/t/unit-tests/clar/test/suites/main.c
new file mode 100644
index 0000000000..3ab581d390
--- /dev/null
+++ b/t/unit-tests/clar/test/suites/main.c
@@ -0,0 +1,27 @@
+/*
+ * Copyright (c) Vicent Marti. All rights reserved.
+ *
+ * This file is part of clar, distributed under the ISC license.
+ * For full terms see the included COPYING file.
+ */
+
+#include "clar.h"
+
+/*
+ * Selftest main() for clar tests.
+ *
+ * You should write your own main routine for clar tests that does specific
+ * setup and teardown as necessary for your application. The only required
+ * line is the call to `clar_test(argc, argv)`, which will execute the test
+ * suite. If you want to check the return value of the test application,
+ * your main() should return the same value returned by clar_test().
+ */
+
+#ifdef _WIN32
+int __cdecl main(int argc, char *argv[])
+#else
+int main(int argc, char *argv[])
+#endif
+{
+ return clar_test(argc, argv);
+}
diff --git a/t/unit-tests/clar/test/suites/pointer.c b/t/unit-tests/clar/test/suites/pointer.c
new file mode 100644
index 0000000000..20535b159e
--- /dev/null
+++ b/t/unit-tests/clar/test/suites/pointer.c
@@ -0,0 +1,13 @@
+#include "clar.h"
+
+void test_pointer__equal(void)
+{
+ void *p1 = (void *)0x1;
+ cl_assert_equal_p(p1, p1);
+}
+
+void test_pointer__unequal(void)
+{
+ void *p1 = (void *)0x1, *p2 = (void *)0x2;
+ cl_assert_equal_p(p1, p2);
+}
diff --git a/t/unit-tests/clar/test/resources/test/file b/t/unit-tests/clar/test/suites/resources/test/file
index 220f4aa98a..220f4aa98a 100644
--- a/t/unit-tests/clar/test/resources/test/file
+++ b/t/unit-tests/clar/test/suites/resources/test/file