diff --git a/tests/environment/modbus_server_mock.py b/tests/environment/modbus_server_mock.py
new file mode 100644
index 0000000..a95a218
--- /dev/null
+++ b/tests/environment/modbus_server_mock.py
@@ -0,0 +1,144 @@
+#!/usr/bin/env python
+
+'''
+Pymodbus Asynchronous Server Example
+--------------------------------------------------------------------------
+
+The asynchronous server is a high performance implementation using the
+twisted library as its backend. This allows it to scale to many thousands
+of nodes which can be helpful for testing monitoring software.
+'''
+#---------------------------------------------------------------------------#
+# import the various server implementations
+#---------------------------------------------------------------------------#
+#from pymodbus.server.async import StartTcpServer
+#from pymodbus.server.async import StartUdpServer
+from pymodbus.server.async import StartSerialServer
+
+from pymodbus.device import ModbusDeviceIdentification
+from pymodbus.datastore import ModbusSequentialDataBlock
+from pymodbus.datastore import ModbusSlaveContext, ModbusServerContext
+from pymodbus.transaction import ModbusRtuFramer, ModbusAsciiFramer
+
+from os import readlink
+
+class ModbusSerialServer:
+    #---------------------------------------------------------------------------#
+    # configure the service logging
+    #---------------------------------------------------------------------------#
+    import logging
+    logging.basicConfig()
+    log = logging.getLogger()
+    log.setLevel(logging.DEBUG)
+
+    serialPort = readlink('/tmp/pts1')
+
+    def _start_rtu_server(self, framer=ModbusRtuFramer):
+        # @req an open and existing /tmp/pts0 is required
+        #---------------------------------------------------------------------------#
+        # initialize your data store
+        #---------------------------------------------------------------------------#
+        # The datastores only respond to the addresses that they are initialized to.
+        # Therefore, if you initialize a DataBlock to addresses from 0x00 to 0xFF, a
+        # request to 0x100 will respond with an invalid address exception. This is
+        # because many devices exhibit this kind of behavior (but not all)::
+        #
+        #     block = ModbusSequentialDataBlock(0x00, [0]*0xff)
+        #
+        # Continuing, you can choose to use a sequential or a sparse DataBlock in
+        # your data context. The difference is that the sequential has no gaps in
+        # the data while the sparse can. Once again, there are devices that exhibit
+        # both forms of behavior::
+        #
+        #     block = ModbusSparseDataBlock({0x00: 0, 0x05: 1})
+        #     block = ModbusSequentialDataBlock(0x00, [0]*5)
+        #
+        # Alternately, you can use the factory methods to initialize the DataBlocks
+        # or simply do not pass them to have them initialized to 0x00 on the full
+        # address range::
+        #
+        #     store = ModbusSlaveContext(di = ModbusSequentialDataBlock.create())
+        #     store = ModbusSlaveContext()
+        #
+        # Finally, you are allowed to use the same DataBlock reference for every
+        # table or you may use a separate DataBlock for each table. This depends
+        # on whether you would like functions to be able to access and modify the
+        # same data or not::
+        #
+        #     block = ModbusSequentialDataBlock(0x00, [0]*0xff)
+        #     store = ModbusSlaveContext(di=block, co=block, hr=block, ir=block)
+        #
+        # The server then makes use of a server context that allows the server to
+        # respond with different slave contexts for different unit ids. By default
+        # it will return the same context for every unit id supplied (broadcast
+        # mode). However, this can be overloaded by setting the single flag to False
+        # and then supplying a dictionary of unit id to context mapping::
+        #
+        #     slaves = {
+        #         0x01: ModbusSlaveContext(...),
+        #         0x02: ModbusSlaveContext(...),
+        #         0x03: ModbusSlaveContext(...),
+        #     }
+        #     context = ModbusServerContext(slaves=slaves, single=False)
+        #
+        # The slave context can also be initialized in zero_mode which means that a
+        # request to address(0-7) will map to the address (0-7). The default is
+        # False which is based on section 4.4 of the specification, so address(0-7)
+        # will map to (1-8)::
+        #
+        #     store = ModbusSlaveContext(..., zero_mode=True)
+        #---------------------------------------------------------------------------#
+        store = ModbusSlaveContext(
+            di = ModbusSequentialDataBlock(0, [12]*100),   # discrete input
+            co = ModbusSequentialDataBlock(0, [13]*100),   # coils
+            hr = ModbusSequentialDataBlock(0, [14]*100),   # holding reg
+            ir = ModbusSequentialDataBlock(0, [15]*100))   # input reg
+        context = ModbusServerContext(slaves=store, single=True)
+
+        #---------------------------------------------------------------------------#
+        # initialize the server information
+        #---------------------------------------------------------------------------#
+        # If you don't set this or any fields, they are defaulted to empty strings.
+        #---------------------------------------------------------------------------#
+        identity = ModbusDeviceIdentification()
+        identity.VendorName = 'Pymodbus'
+        identity.ProductCode = 'PM'
+        identity.VendorUrl = 'http://github.com/bashwork/pymodbus/'
+        identity.ProductName = 'Pymodbus Server'
+        identity.ModelName = 'Pymodbus Server'
+        identity.MajorMinorRevision = '1.0'
+
+        #---------------------------------------------------------------------------#
+        # run the server you want
+        #---------------------------------------------------------------------------#
+        #StartTcpServer(context, identity=identity, address=("localhost", 5020))
+        #StartUdpServer(context, identity=identity, address=("localhost", 502))
+        StartSerialServer(context, identity=identity, port=self.serialPort, baudrate=19200, framer=framer)
+        #StartSerialServer(context, identity=identity, port='/dev/pts/3', framer=ModbusAsciiFramer)
+
+    p = None
+    def start(self):
+        from multiprocessing import Process
+
+        self.p = Process(target=self._start_rtu_server) #args=('bob',)
+        self.p.daemon = True
+        self.p.start()
+        print("p.start done")
+
+
+    def kill(self):
+        print("Going to terminate the process, this could throw exceptions")
+        if self.p is not None:
+            self.p.terminate()
+
+
+if __name__ == '__main__':
+    mbs = ModbusSerialServer()
+    mbs._start_rtu_server()
+
+    #mbs.start()
+
+    #import time
+    #time.sleep(3600)
+    #mbs.kill()
+
diff --git a/tests/environment/modbus_server_runner.sh b/tests/environment/modbus_server_runner.sh
new file mode 100755
index 0000000..7d3108a
--- /dev/null
+++ b/tests/environment/modbus_server_runner.sh
@@ -0,0 +1,32 @@
+#!/usr/bin/env bash
+
+MBUS_SERVER_PID=/tmp/modbus_server.pid
+
+. ./subprocess_helper.sh
+
+check_preconditions() {
+    #TODO check if python module 'pymodbus' is installed
+    #python -c "import foo"
+    true
+}
+
+# check argument count
+## https://stackoverflow.com/questions/4341630/checking-for-the-correct-number-of-arguments
+if [ "$#" -ne 1 ]; then
+    echo "Usage: $0 up|down" >&2
+    exit 1
+fi
+
+check_preconditions
+case "$1" in
+    up|start)
+        #TODO obtain current directory
+        CURRENT_DIR="$(dirname "$(realpath "$0")")"
+        CMD="python ${CURRENT_DIR}/modbus_server_mock.py &"
+        run_cmd_save_pid "$CMD" $MBUS_SERVER_PID
+        ;;
+    down|stop)
+        kill_pid $MBUS_SERVER_PID
+        ;;
+esac
+
diff --git a/tests/environment/socat_runner.sh b/tests/environment/socat_runner.sh
new file mode 100755
index 0000000..acd0ce6
--- /dev/null
+++ b/tests/environment/socat_runner.sh
@@ -0,0 +1,35 @@
+#!/usr/bin/env bash
+
+SOCAT_BIN=socat
+SOCAT_ARGS="-d -d pty,raw,echo=0,link=/tmp/pts0 pty,raw,echo=0,link=/tmp/pts1"
+SOCAT_PID_FILE=/tmp/socat.pid
+
+. ./subprocess_helper.sh
+
+check_preconditions() {
+    # check if socat is installed
+    ## https://stackoverflow.com/questions/592620/check-if-a-program-exists-from-a-bash-script
+    if ! [ -x "$(command -v $SOCAT_BIN)" ]; then
+        echo "[E] please install $SOCAT_BIN" >&2
+        exit 1
+    fi
+}
+
+# check argument count
+## https://stackoverflow.com/questions/4341630/checking-for-the-correct-number-of-arguments
+if [ "$#" -ne 1 ]; then
+    echo "Usage: $0 up|down" >&2
+    exit 1
+fi
+
+check_preconditions
+case "$1" in
+    up|start)
+        SOCAT_CMD="$(command -v socat) ${SOCAT_ARGS} &"
+        run_cmd_save_pid "$SOCAT_CMD" $SOCAT_PID_FILE
+        ;;
+    down|stop)
+        kill_pid $SOCAT_PID_FILE
+        ;;
+esac
+
diff --git a/tests/environment/subprocess_helper.sh b/tests/environment/subprocess_helper.sh
new file mode 100644
index 0000000..5d4c255
--- /dev/null
+++ b/tests/environment/subprocess_helper.sh
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+
+# @arg1 command to be executed in background as string
+# @arg2 path to PID file
+run_cmd_save_pid() {
+    local _CMD="$1"
+    local _PID_FILE=$2
+
+    if [[ -e $_PID_FILE ]]; then
+        echo "[D] pid_file ($_PID_FILE) exists, done"
+        return 1
+    fi
+
+    echo "[I] running $_CMD in background..."
+    eval $_CMD
+    if [[ $? -ne 0 ]] ; then
+        echo "[E] $_CMD call produced errors"
+        return 2
+    fi
+    ## https://stackoverflow.com/questions/9890062/how-to-run-a-program-and-know-its-pid-in-linux
+    echo $! > $_PID_FILE
+}
+# @arg1 path to PID file
+kill_pid() {
+    local _PID_FILE=$1
+
+    if [[ ! -e $_PID_FILE ]]; then
+        return 0
+    fi
+
+    echo "[I] killing pid file: $_PID_FILE"
+    kill `cat $_PID_FILE`
+    rm -f $_PID_FILE
+}
\ No newline at end of file
diff --git a/tests/greatest/CONTRIBUTING.md b/tests/greatest/CONTRIBUTING.md
new file mode 100644
index 0000000..39b5da2
--- /dev/null
+++ b/tests/greatest/CONTRIBUTING.md
@@ -0,0 +1,75 @@
+# Contributing to greatest
+
+Thanks for taking time to contribute to greatest!
+
+Please send patches or pull requests against the `develop` branch. This
+makes it easier to avoid interface changes until they can be reflected
+in version number updates.
+
+Sending changes via patch or pull request acknowledges that you are
+willing and able to contribute it under this project's license. (Please
+don't contribute code you aren't legally able to share.)
+
+
+## Bug Reports
+
+Please report bugs at [the Issues page][issues].
+
+[issues]: https://github.com/silentbicycle/greatest/issues
+
+If you are reporting a bug, please include:
+
++ Your operating system name and version.
+
++ Your compiler version and target platform.
+ ++ Any details about your local setup that might be helpful in + troubleshooting. + ++ Detailed steps to reproduce the bug. + + +## Documentation + +Improvements to the documentation are welcome. So are requests for +clarification -- if the docs are unclear or misleading, that's a +potential source of bugs. + + +## Portability + +greatest tries to assume little about its environment. It targets ANSI C +(C89) as a baseline, and features that are specific to C99 or later need +to be wrapped in a version check. + +It doesn't require a particular OS, or that there is an OS at all. (It +may be running tests on an embedded system without an RTOS.) It uses +`fprintf(3)` for reporting, that's about it. + +Improvements that don't fit the portability requirements can go in +`contrib/`, just not the main `greatest.h` header. + + +## contrib/: Extensions and Other Tools + +There is a `contrib/` directory for extensions. This could include +scripts that generate test templates, add formatting to the reports, or +better integrate greatest into build / continuous integration systems. +Extension libraries that depend on dynamic allocation or +platform-specific features can also go here. + +Please include a license in any standalone scripts or extensions. + + +## Versioning & Compatibility + +The versioning format is MAJOR.MINOR.PATCH. + +Improvements or minor bug fixes that do not break compatibility with +past releases lead to patch version increases. API changes that do not +break compatibility lead to minor version increases and reset the patch +version, and changes that do break compatibility lead to a major version +increase. + +The version will be increased during the merge to master, as part of +the release process. diff --git a/tests/greatest/LICENSE b/tests/greatest/LICENSE new file mode 100644 index 0000000..ab51022 --- /dev/null +++ b/tests/greatest/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2011-2016 Scott Vokes + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/tests/greatest/README.md b/tests/greatest/README.md new file mode 100644 index 0000000..c4f3090 --- /dev/null +++ b/tests/greatest/README.md @@ -0,0 +1,338 @@ +# greatest + +A testing system for C, contained in 1 file. + + +## Key Features + +- **Small, Portable, Lightweight** + + greatest doesn't depend on anything beyond ANSI C89, and the test + scaffolding should build without warnings when compiled with + `-Wall -Wextra -pedantic`. It is under 1,000 LOC (SLOCCount), + and does no dynamic allocation. + +- **Permissive License** + + greatest is released under the [ISC License][ISC]. You can use it + freely, even for commercial purposes. + +- **Easy To Set Up** + + To use, just `#include "greatest.h"` in your project. There is + very little boilerplate. Most features are optional. 
+ +- **Un-Opinionated** + + When a command-line test runner is useful, greatest can provide one, + but it can also run as part of other programs. It doesn't depend on + a particular build system or other tooling, and should accommodate a + variety of testing approaches. It actively avoids imposing + architectural choices on code under test. While greatest was + designed with C in mind, it attempts to be usable from C++. + +- **Modular** + + Tests can be run individually, or grouped into suites. Suites can + share common setup, and can be in distinct compilation + units. + +- **Low Friction** + + Specific tests or suites can be run by name, for focused and rapid + iteration during development. greatest adds very little startup + latency. + + +There are some compile-time options, and slightly nicer syntax for +parametric testing (running tests with arguments) if compiled +with a C99 or later language standard. + +I wrote a +[blog post](http://spin.atomicobject.com/2013/07/31/greatest-c-testing-embedded/) +with more information. + +[theft][], a related project, adds [property-based testing][pbt]. + +[1]: http://spin.atomicobject.com/2013/07/31/greatest-c-testing-embedded/ +[theft]: https://github.com/silentbicycle/theft +[pbt]: https://spin.atomicobject.com/2014/09/17/property-based-testing-c/ +[ISC]: https://opensource.org/licenses/isc-license.txt + +## Basic Usage + +```c +#include "greatest.h" + +/* A test runs various assertions, then calls PASS(), FAIL(), or SKIP(). */ +TEST x_should_equal_1(void) { + int x = 1; + ASSERT_EQ(1, x); /* default message */ + ASSERT_EQm("yikes, x doesn't equal 1", 1, x); /* custom message */ + /* printf expected and actual values as "%d" if they differ */ + ASSERT_EQ_FMT(1, x, "%d"); + PASS(); +} + +/* Suites can group multiple tests with common setup. */ +SUITE(the_suite) { + RUN_TEST(x_should_equal_1); +} + +/* Add definitions that need to be in the test runner's main file. */ +GREATEST_MAIN_DEFS(); + +int main(int argc, char **argv) { + GREATEST_MAIN_BEGIN(); /* command-line options, initialization. */ + + /* Individual tests can be run directly. */ + /* RUN_TEST(x_should_equal_1); */ + + /* Tests can also be gathered into test suites. */ + RUN_SUITE(the_suite); + + GREATEST_MAIN_END(); /* display results */ +} +``` + +Output: + +```sh +$ make simple && ./simple +cc -g -Wall -Werror -pedantic simple.c -o simple + +* Suite the_suite: +. +1 test - 1 passed, 0 failed, 0 skipped (5 ticks, 0.000 sec) + +Total: 1 test (47 ticks, 0.000 sec), 3 assertions +Pass: 1, fail: 0, skip: 0. +``` + +Test cases should call assertions and then end in `PASS()`, `SKIP()`, +`FAIL()`, or one of their message variants (e.g. `SKIPm("TODO");`). +If there are any test failures, the test runner will return 1, +otherwise it will return 0. (Skips do not cause a test runner to +report failure.) + +Tests and suites are just functions, so normal C scoping rules apply. +For example, a test or suite named "main" will have a name collision. + +(For more examples, look at `example.c` and `example_suite.c`.) + + +## Filtering By Name + +greatest runs all tests by default, but can be configured to only run +suites or tests whose names contain a filter string, and/or exclude +tests whose name contains a filter string. When test name filtering and +exclusion are used together, exclusion takes precedence. 
+ + void greatest_set_suite_filter(const char *name); + void greatest_set_test_filter(const char *name); + void greatest_set_test_exclude(const char *name); + +These correspond to the following command line test runner options: + + `-s SUITE`: Only run suites whose names contain the string "SUITE" + `-t TEST`: Only run tests whose names contain the string "TEST" + `-x EXCLUDE`: Exclude tests whose names contain the string "EXCLUDE" + +For example, to run any tests with "tree" in the name, in suites with +"pars" in the name (such as "parser"), but exclude any tests whose names +also contain "slow": + + ./test_project -s pars -t tree -x slow + + +## Available Assertions + +Assertions fail the current test unless some condition holds. All +assertions have a "message" variant (with an `m` suffix), which takes a +custom failure message string as their first argument. For example, the +assertion `ASSERT_EQ(apple, orange);` could instead be used like +`ASSERT_EQm("these should match", apple, orange)`. Non-message +assertions create a default message. + + +### `ASSERT(COND)` + +Assert that `COND` evaluates to a true value. + + +### `ASSERT_FALSE(COND)` + +Assert that `COND` evaluates to a false value. + + +### `ASSERT_EQ(EXPECTED, ACTUAL)` + +Assert that `EXPECTED == ACTUAL`. To compare with a custom equality test +function, use `ASSERT_EQUAL_T` instead. To print the values if they +differ, use `ASSERT_EQ_FMT`. + + +### `ASSERT_EQ_FMT(EXPECTED, ACTUAL, FORMAT)` + +Assert that `EXPECTED == ACTUAL`. If they are not equal, print their +values using FORMAT as the `printf` format string. + +For example: `ASSERT_EQ_FMT(123, result, "%d");` + +Note: `EXPECTED` and `ACTUAL` will be evaluated more than once on +failure, so they should not be a function call with side effects. +(Since their type is not known by the macro, they cannot be +captured in a local variable.) + + +### `ASSERT_IN_RANGE(EXPECTED, ACTUAL, TOLERANCE)` + +Assert that ACTUAL is within EXPECTED +/- TOLERANCE, once the values +have been converted to a configurable floating point type +(`GREATEST_FLOAT`). + + +### `ASSERT_STR_EQ(EXPECTED, ACTUAL)` + +Assert that the strings are equal +(i.e., `strcmp(EXPECTED, ACTUAL) == 0`). + + +### `ASSERT_STRN_EQ(EXPECTED, ACTUAL, SIZE)` + +Assert that the first SIZE bytes of the strings are equal +(i.e., `strncmp(EXPECTED, ACTUAL, SIZE) == 0`). + + +### `ASSERT_MEM_EQ(EXPECTED, ACTUAL, SIZE)` + +Assert that the first SIZE bytes of memory pointed to +by EXPECTED and ACTUAL are equal. If the memory differs, print +a hexdump and highlight the lines and individual bytes which +do not match. + + +### `ASSERT_ENUM_EQ(EXPECTED, ACTUAL, ENUM_STR_FUN)` + +Assert that the enum value EXPECTED is equal to ACTUAL. If not, convert +each enum value to a string using `ENUM_STR_FUN` before printing them. + +`ENUM_STR_FUN` should have a type like: + + const char *some_enum_str(enum some_enum x); + + +### `ASSERT_EQUAL_T(EXPECTED, ACTUAL, TYPE_INFO, UDATA)` + +Assert that EXPECTED and ACTUAL are equal, using the `greatest_equal_cb` +function pointed to by `TYPE_INFO->equal` to compare them. The +assertion's `UDATA` argument can be used to pass in arbitrary user data +(or `NULL`). If the values are not equal and the `TYPE_INFO->print` +function is defined, it will be used to print an "Expected: X, Got: Y" +message. + + +### `ASSERT_OR_LONGJMP(COND)` + +Assert that `COND` evaluates to a true value. If not, then use +`longjmp(3)` to immediately return from the test case and any +intermediate function calls. 
(If built with `GREATEST_USE_LONGJMP` +defined to 0, then all setjmp/longjmp-related functionality will be +compiled out.) + + +## Random Shuffling + +Groups of suites or tests can be run in random order by using +`GREATEST_SHUFFLE_SUITES` and `GREATEST_SHUFFLE_TESTS`, respectively. +This can help find and eliminate coupling between tests. + +The shuffling depends on the seed and the test/suite count, so a +consistent seed will only lead to reproducible ordering until the +group's count changes. + +Shuffling suites: + + SHUFFLE_SUITES(seed, { + RUN_SUITE(suite1); + RUN_SUITE(suite2); + RUN_SUITE(suite3); + RUN_SUITE(suite4); + RUN_SUITE(suite5); + }); + +Shuffling tests: + + SHUFFLE_TESTS(seed, { + RUN_TEST(test_a); + RUN_TEST1(test_b, 12345); + RUN_TEST(test_c); + RUN_TESTp(test_d, "some_argument"); + RUN_TEST(test_e); + }); + +Note: Any other code inside the block will be executed several times. +The shuffling macro expands to a loop with (count + 1) iterations -- the +first pass counts, and the following passes only execute the next chosen +suite/test. In particular, avoid running tests directly inside of a +`SHUFFLE_SUITES` block (without a suite), because the test will run over +and over. + + +## Sub-Functions + +Because of how `PASS()`, `ASSERT()`, `FAIL()`, etc. are implemented +(returning a test result enum value), calls to functions that use them +directly from test functions must be wrapped in `CHECK_CALL`: + + TEST example_using_subfunctions(void) { + CHECK_CALL(less_than_three(5)); + PASS(); + } + +This is only necessary if the called function can cause test failures. + + +## Command Line Options + +Test runners build with the following command line options: + + Usage: (test_runner) [--help] [-hlfv] [-s SUITE] [-t TEST] + -h, --help print this Help + -l List suites and tests, then exit (dry run) + -f Stop runner after first failure + -v Verbose output + -s SUITE only run suite w/ name containing SUITE substring + -t TEST only run test w/ name containing TEST substring + -t EXCLUDE exclude tests containing string EXCLUDE substring + +Any arguments after `--` will be ignored. + +If you want to run multiple test suites in parallel, look at +[parade](https://github.com/silentbicycle/parade). + +These command line options are processed by `GREATEST_MAIN_BEGIN();`. + + +## Aliases + +Most of the macros have prefixed and unprefixed forms. For example, +`SUITE` is the same as `GREATEST_SUITE`. + +Check the source for the list -- search for `#if GREATEST_USE_ABBREVS`. + +These aliases can be disabled by `#define`-ing `GREATEST_USE_ABBREVS` to 0. + + +## Color Output + +If you want color output (`PASS` in green, `FAIL` in red, etc.), you can +pipe the output through the included `greenest` script in `contrib/`: + +```sh +$ ./example -v | greenest +``` + +(Note that `greenest` depends on a Unix-like environment.) + +greatest itself doesn't have built-in coloring to stay small and portable. 
diff --git a/tests/greatest/github.lnk b/tests/greatest/github.lnk new file mode 100644 index 0000000..0fa8a52 --- /dev/null +++ b/tests/greatest/github.lnk @@ -0,0 +1,2 @@ +# this is an external repository, its target is +https://github.com/silentbicycle/greatest \ No newline at end of file diff --git a/tests/greatest/greatest.h b/tests/greatest/greatest.h new file mode 100644 index 0000000..654918f --- /dev/null +++ b/tests/greatest/greatest.h @@ -0,0 +1,1189 @@ +/* + * Copyright (c) 2011-2017 Scott Vokes + * + * Permission to use, copy, modify, and/or distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +#ifndef GREATEST_H +#define GREATEST_H + +#if defined(__cplusplus) && !defined(GREATEST_NO_EXTERN_CPLUSPLUS) +extern "C" { +#endif + +/* 1.3.1 */ +#define GREATEST_VERSION_MAJOR 1 +#define GREATEST_VERSION_MINOR 3 +#define GREATEST_VERSION_PATCH 1 + +/* A unit testing system for C, contained in 1 file. + * It doesn't use dynamic allocation or depend on anything + * beyond ANSI C89. + * + * An up-to-date version can be found at: + * https://github.com/silentbicycle/greatest/ + */ + + +/********************************************************************* + * Minimal test runner template + *********************************************************************/ +#if 0 + +#include "greatest.h" + +TEST foo_should_foo(void) { + PASS(); +} + +static void setup_cb(void *data) { + printf("setup callback for each test case\n"); +} + +static void teardown_cb(void *data) { + printf("teardown callback for each test case\n"); +} + +SUITE(suite) { + /* Optional setup/teardown callbacks which will be run before/after + * every test case. If using a test suite, they will be cleared when + * the suite finishes. */ + SET_SETUP(setup_cb, voidp_to_callback_data); + SET_TEARDOWN(teardown_cb, voidp_to_callback_data); + + RUN_TEST(foo_should_foo); +} + +/* Add definitions that need to be in the test runner's main file. */ +GREATEST_MAIN_DEFS(); + +/* Set up, run suite(s) of tests, report pass/fail/skip stats. */ +int run_tests(void) { + GREATEST_INIT(); /* init. greatest internals */ + /* List of suites to run (if any). */ + RUN_SUITE(suite); + + /* Tests can also be run directly, without using test suites. */ + RUN_TEST(foo_should_foo); + + GREATEST_PRINT_REPORT(); /* display results */ + return greatest_all_passed(); +} + +/* main(), for a standalone command-line test runner. + * This replaces run_tests above, and adds command line option + * handling and exiting with a pass/fail status. */ +int main(int argc, char **argv) { + GREATEST_MAIN_BEGIN(); /* init & parse command-line args */ + RUN_SUITE(suite); + GREATEST_MAIN_END(); /* display results */ +} + +#endif +/*********************************************************************/ + + +#include +#include +#include +#include + +/*********** + * Options * + ***********/ + +/* Default column width for non-verbose output. 
*/ +#ifndef GREATEST_DEFAULT_WIDTH +#define GREATEST_DEFAULT_WIDTH 72 +#endif + +/* FILE *, for test logging. */ +#ifndef GREATEST_STDOUT +#define GREATEST_STDOUT stdout +#endif + +/* Remove GREATEST_ prefix from most commonly used symbols? */ +#ifndef GREATEST_USE_ABBREVS +#define GREATEST_USE_ABBREVS 1 +#endif + +/* Set to 0 to disable all use of setjmp/longjmp. */ +#ifndef GREATEST_USE_LONGJMP +#define GREATEST_USE_LONGJMP 1 +#endif + +/* Make it possible to replace fprintf with another + * function with the same interface. */ +#ifndef GREATEST_FPRINTF +#define GREATEST_FPRINTF fprintf +#endif + +#if GREATEST_USE_LONGJMP +#include +#endif + +/* Set to 0 to disable all use of time.h / clock(). */ +#ifndef GREATEST_USE_TIME +#define GREATEST_USE_TIME 1 +#endif + +#if GREATEST_USE_TIME +#include +#endif + +/* Floating point type, for ASSERT_IN_RANGE. */ +#ifndef GREATEST_FLOAT +#define GREATEST_FLOAT double +#define GREATEST_FLOAT_FMT "%g" +#endif + + +/********* + * Types * + *********/ + +/* Info for the current running suite. */ +typedef struct greatest_suite_info { + unsigned int tests_run; + unsigned int passed; + unsigned int failed; + unsigned int skipped; + +#if GREATEST_USE_TIME + /* timers, pre/post running suite and individual tests */ + clock_t pre_suite; + clock_t post_suite; + clock_t pre_test; + clock_t post_test; +#endif +} greatest_suite_info; + +/* Type for a suite function. */ +typedef void greatest_suite_cb(void); + +/* Types for setup/teardown callbacks. If non-NULL, these will be run + * and passed the pointer to their additional data. */ +typedef void greatest_setup_cb(void *udata); +typedef void greatest_teardown_cb(void *udata); + +/* Type for an equality comparison between two pointers of the same type. + * Should return non-0 if equal, otherwise 0. + * UDATA is a closure value, passed through from ASSERT_EQUAL_T[m]. */ +typedef int greatest_equal_cb(const void *exp, const void *got, void *udata); + +/* Type for a callback that prints a value pointed to by T. + * Return value has the same meaning as printf's. + * UDATA is a closure value, passed through from ASSERT_EQUAL_T[m]. */ +typedef int greatest_printf_cb(const void *t, void *udata); + +/* Callbacks for an arbitrary type; needed for type-specific + * comparisons via GREATEST_ASSERT_EQUAL_T[m].*/ +typedef struct greatest_type_info { + greatest_equal_cb *equal; + greatest_printf_cb *print; +} greatest_type_info; + +typedef struct greatest_memory_cmp_env { + const unsigned char *exp; + const unsigned char *got; + size_t size; +} greatest_memory_cmp_env; + +/* Callbacks for string and raw memory types. */ +extern greatest_type_info greatest_type_info_string; +extern greatest_type_info greatest_type_info_memory; + +typedef enum { + GREATEST_FLAG_FIRST_FAIL = 0x01, + GREATEST_FLAG_LIST_ONLY = 0x02 +} greatest_flag_t; + +/* Internal state for a PRNG, used to shuffle test order. */ +struct greatest_prng { + unsigned char random_order; /* use random ordering? */ + unsigned char initialized; /* is random ordering initialized? */ + unsigned char pad_0[2]; + unsigned long state; /* PRNG state */ + unsigned long count; /* how many tests, this pass */ + unsigned long count_ceil; /* total number of tests */ + unsigned long count_run; /* total tests run */ + unsigned long mod; /* power-of-2 ceiling of count_ceil */ + unsigned long a; /* LCG multiplier */ + unsigned long c; /* LCG increment */ +}; + +/* Struct containing all test runner state. 
*/ +typedef struct greatest_run_info { + unsigned char flags; + unsigned char verbosity; + unsigned char pad_0[2]; + + unsigned int tests_run; /* total test count */ + + /* currently running test suite */ + greatest_suite_info suite; + + /* overall pass/fail/skip counts */ + unsigned int passed; + unsigned int failed; + unsigned int skipped; + unsigned int assertions; + + /* info to print about the most recent failure */ + unsigned int fail_line; + unsigned int pad_1; + const char *fail_file; + const char *msg; + + /* current setup/teardown hooks and userdata */ + greatest_setup_cb *setup; + void *setup_udata; + greatest_teardown_cb *teardown; + void *teardown_udata; + + /* formatting info for ".....s...F"-style output */ + unsigned int col; + unsigned int width; + + /* only run a specific suite or test */ + const char *suite_filter; + const char *test_filter; + const char *test_exclude; + + struct greatest_prng prng[2]; /* 0: suites, 1: tests */ + +#if GREATEST_USE_TIME + /* overall timers */ + clock_t begin; + clock_t end; +#endif + +#if GREATEST_USE_LONGJMP + int pad_jmp_buf; + jmp_buf jump_dest; +#endif +} greatest_run_info; + +struct greatest_report_t { + /* overall pass/fail/skip counts */ + unsigned int passed; + unsigned int failed; + unsigned int skipped; + unsigned int assertions; +}; + +/* Global var for the current testing context. + * Initialized by GREATEST_MAIN_DEFS(). */ +extern greatest_run_info greatest_info; + +/* Type for ASSERT_ENUM_EQ's ENUM_STR argument. */ +typedef const char *greatest_enum_str_fun(int value); + +/********************** + * Exported functions * + **********************/ + +/* These are used internally by greatest. */ +void greatest_do_pass(const char *name); +void greatest_do_fail(const char *name); +void greatest_do_skip(const char *name); +int greatest_suite_pre(const char *suite_name); +void greatest_suite_post(void); +int greatest_test_pre(const char *name); +void greatest_test_post(const char *name, int res); +void greatest_usage(const char *name); +int greatest_do_assert_equal_t(const void *exp, const void *got, +greatest_type_info *type_info, void *udata); +void greatest_prng_init_first_pass(int id); +int greatest_prng_init_second_pass(int id, unsigned long seed); +void greatest_prng_step(int id); + +/* These are part of the public greatest API. */ +void GREATEST_SET_SETUP_CB(greatest_setup_cb *cb, void *udata); +void GREATEST_SET_TEARDOWN_CB(greatest_teardown_cb *cb, void *udata); +int greatest_all_passed(void); +void greatest_set_suite_filter(const char *filter); +void greatest_set_test_filter(const char *filter); +void greatest_set_test_exclude(const char *filter); +void greatest_stop_at_first_fail(void); +void greatest_get_report(struct greatest_report_t *report); +unsigned int greatest_get_verbosity(void); +void greatest_set_verbosity(unsigned int verbosity); +void greatest_set_flag(greatest_flag_t flag); + + +/******************** +* Language Support * +********************/ + +/* If __VA_ARGS__ (C99) is supported, allow parametric testing +* without needing to manually manage the argument struct. */ +#if __STDC_VERSION__ >= 19901L || _MSC_VER >= 1800 +#define GREATEST_VA_ARGS +#endif + + +/********** + * Macros * + **********/ + +/* Define a suite. */ +#define GREATEST_SUITE(NAME) void NAME(void); void NAME(void) + +/* Declare a suite, provided by another compilation unit. */ +#define GREATEST_SUITE_EXTERN(NAME) void NAME(void) + +/* Start defining a test function. + * The arguments are not included, to allow parametric testing. 
*/ +#define GREATEST_TEST static enum greatest_test_res + +/* PASS/FAIL/SKIP result from a test. Used internally. */ +typedef enum greatest_test_res { + GREATEST_TEST_RES_PASS = 0, + GREATEST_TEST_RES_FAIL = -1, + GREATEST_TEST_RES_SKIP = 1 +} greatest_test_res; + +/* Run a suite. */ +#define GREATEST_RUN_SUITE(S_NAME) greatest_run_suite(S_NAME, #S_NAME) + +/* Run a test in the current suite. */ +#define GREATEST_RUN_TEST(TEST) \ + do { \ + if (greatest_test_pre(#TEST) == 1) { \ + enum greatest_test_res res = GREATEST_SAVE_CONTEXT(); \ + if (res == GREATEST_TEST_RES_PASS) { \ + res = TEST(); \ + } \ + greatest_test_post(#TEST, res); \ + } \ + } while (0) + +/* Ignore a test, don't warn about it being unused. */ +#define GREATEST_IGNORE_TEST(TEST) (void)TEST + +/* Run a test in the current suite with one void * argument, + * which can be a pointer to a struct with multiple arguments. */ +#define GREATEST_RUN_TEST1(TEST, ENV) \ + do { \ + if (greatest_test_pre(#TEST) == 1) { \ + enum greatest_test_res res = GREATEST_SAVE_CONTEXT(); \ + if (res == GREATEST_TEST_RES_PASS) { \ + res = TEST(ENV); \ + } \ + greatest_test_post(#TEST, res); \ + } \ + } while (0) + +#ifdef GREATEST_VA_ARGS +#define GREATEST_RUN_TESTp(TEST, ...) \ + do { \ + if (greatest_test_pre(#TEST) == 1) { \ + enum greatest_test_res res = GREATEST_SAVE_CONTEXT(); \ + if (res == GREATEST_TEST_RES_PASS) { \ + res = TEST(__VA_ARGS__); \ + } \ + greatest_test_post(#TEST, res); \ + } \ + } while (0) +#endif + + +/* Check if the test runner is in verbose mode. */ +#define GREATEST_IS_VERBOSE() ((greatest_info.verbosity) > 0) +#define GREATEST_LIST_ONLY() \ + (greatest_info.flags & GREATEST_FLAG_LIST_ONLY) +#define GREATEST_FIRST_FAIL() \ + (greatest_info.flags & GREATEST_FLAG_FIRST_FAIL) +#define GREATEST_FAILURE_ABORT() \ + (GREATEST_FIRST_FAIL() && \ + (greatest_info.suite.failed > 0 || greatest_info.failed > 0)) + +/* Message-less forms of tests defined below. */ +#define GREATEST_PASS() GREATEST_PASSm(NULL) +#define GREATEST_FAIL() GREATEST_FAILm(NULL) +#define GREATEST_SKIP() GREATEST_SKIPm(NULL) +#define GREATEST_ASSERT(COND) \ + GREATEST_ASSERTm(#COND, COND) +#define GREATEST_ASSERT_OR_LONGJMP(COND) \ + GREATEST_ASSERT_OR_LONGJMPm(#COND, COND) +#define GREATEST_ASSERT_FALSE(COND) \ + GREATEST_ASSERT_FALSEm(#COND, COND) +#define GREATEST_ASSERT_EQ(EXP, GOT) \ + GREATEST_ASSERT_EQm(#EXP " != " #GOT, EXP, GOT) +#define GREATEST_ASSERT_EQ_FMT(EXP, GOT, FMT) \ + GREATEST_ASSERT_EQ_FMTm(#EXP " != " #GOT, EXP, GOT, FMT) +#define GREATEST_ASSERT_IN_RANGE(EXP, GOT, TOL) \ + GREATEST_ASSERT_IN_RANGEm(#EXP " != " #GOT " +/- " #TOL, EXP, GOT, TOL) +#define GREATEST_ASSERT_EQUAL_T(EXP, GOT, TYPE_INFO, UDATA) \ + GREATEST_ASSERT_EQUAL_Tm(#EXP " != " #GOT, EXP, GOT, TYPE_INFO, UDATA) +#define GREATEST_ASSERT_STR_EQ(EXP, GOT) \ + GREATEST_ASSERT_STR_EQm(#EXP " != " #GOT, EXP, GOT) +#define GREATEST_ASSERT_STRN_EQ(EXP, GOT, SIZE) \ + GREATEST_ASSERT_STRN_EQm(#EXP " != " #GOT, EXP, GOT, SIZE) +#define GREATEST_ASSERT_MEM_EQ(EXP, GOT, SIZE) \ + GREATEST_ASSERT_MEM_EQm(#EXP " != " #GOT, EXP, GOT, SIZE) +#define GREATEST_ASSERT_ENUM_EQ(EXP, GOT, ENUM_STR) \ + GREATEST_ASSERT_ENUM_EQm(#EXP " != " #GOT, EXP, GOT, ENUM_STR) + +/* The following forms take an additional message argument first, + * to be displayed by the test runner. */ + +/* Fail if a condition is not true, with message. 
*/ +#define GREATEST_ASSERTm(MSG, COND) \ + do { \ + greatest_info.assertions++; \ + if (!(COND)) { GREATEST_FAILm(MSG); } \ + } while (0) + +/* Fail if a condition is not true, longjmping out of test. */ +#define GREATEST_ASSERT_OR_LONGJMPm(MSG, COND) \ + do { \ + greatest_info.assertions++; \ + if (!(COND)) { GREATEST_FAIL_WITH_LONGJMPm(MSG); } \ + } while (0) + +/* Fail if a condition is not false, with message. */ +#define GREATEST_ASSERT_FALSEm(MSG, COND) \ + do { \ + greatest_info.assertions++; \ + if ((COND)) { GREATEST_FAILm(MSG); } \ + } while (0) + +/* Fail if EXP != GOT (equality comparison by ==). */ +#define GREATEST_ASSERT_EQm(MSG, EXP, GOT) \ + do { \ + greatest_info.assertions++; \ + if ((EXP) != (GOT)) { GREATEST_FAILm(MSG); } \ + } while (0) + +/* Fail if EXP != GOT (equality comparison by ==). + * Warning: FMT, EXP, and GOT will be evaluated more + * than once on failure. */ +#define GREATEST_ASSERT_EQ_FMTm(MSG, EXP, GOT, FMT) \ + do { \ + greatest_info.assertions++; \ + if ((EXP) != (GOT)) { \ + GREATEST_FPRINTF(GREATEST_STDOUT, "\nExpected: "); \ + GREATEST_FPRINTF(GREATEST_STDOUT, FMT, EXP); \ + GREATEST_FPRINTF(GREATEST_STDOUT, "\n Got: "); \ + GREATEST_FPRINTF(GREATEST_STDOUT, FMT, GOT); \ + GREATEST_FPRINTF(GREATEST_STDOUT, "\n"); \ + GREATEST_FAILm(MSG); \ + } \ + } while (0) + +/* Fail if EXP is not equal to GOT, printing enum IDs. */ +#define GREATEST_ASSERT_ENUM_EQm(MSG, EXP, GOT, ENUM_STR) \ + do { \ + int greatest_EXP = (int)(EXP); \ + int greatest_GOT = (int)(GOT); \ + greatest_enum_str_fun *greatest_ENUM_STR = ENUM_STR; \ + if (greatest_EXP != greatest_GOT) { \ + GREATEST_FPRINTF(GREATEST_STDOUT, "\nExpected: %s", \ + greatest_ENUM_STR(greatest_EXP)); \ + GREATEST_FPRINTF(GREATEST_STDOUT, "\n Got: %s\n", \ + greatest_ENUM_STR(greatest_GOT)); \ + GREATEST_FAILm(MSG); \ + } \ + } while (0) \ + +/* Fail if GOT not in range of EXP +|- TOL. */ +#define GREATEST_ASSERT_IN_RANGEm(MSG, EXP, GOT, TOL) \ + do { \ + GREATEST_FLOAT greatest_EXP = (EXP); \ + GREATEST_FLOAT greatest_GOT = (GOT); \ + GREATEST_FLOAT greatest_TOL = (TOL); \ + greatest_info.assertions++; \ + if ((greatest_EXP > greatest_GOT && \ + greatest_EXP - greatest_GOT > greatest_TOL) || \ + (greatest_EXP < greatest_GOT && \ + greatest_GOT - greatest_EXP > greatest_TOL)) { \ + GREATEST_FPRINTF(GREATEST_STDOUT, \ + "\nExpected: " GREATEST_FLOAT_FMT \ + " +/- " GREATEST_FLOAT_FMT \ + "\n Got: " GREATEST_FLOAT_FMT \ + "\n", \ + greatest_EXP, greatest_TOL, greatest_GOT); \ + GREATEST_FAILm(MSG); \ + } \ + } while (0) + +/* Fail if EXP is not equal to GOT, according to strcmp. */ +#define GREATEST_ASSERT_STR_EQm(MSG, EXP, GOT) \ + do { \ + GREATEST_ASSERT_EQUAL_Tm(MSG, EXP, GOT, \ + &greatest_type_info_string, NULL); \ + } while (0) \ + +/* Fail if EXP is not equal to GOT, according to strcmp. */ +#define GREATEST_ASSERT_STRN_EQm(MSG, EXP, GOT, SIZE) \ + do { \ + size_t size = SIZE; \ + GREATEST_ASSERT_EQUAL_Tm(MSG, EXP, GOT, \ + &greatest_type_info_string, &size); \ + } while (0) \ + +/* Fail if EXP is not equal to GOT, according to memcmp. */ +#define GREATEST_ASSERT_MEM_EQm(MSG, EXP, GOT, SIZE) \ + do { \ + greatest_memory_cmp_env env; \ + env.exp = (const unsigned char *)EXP; \ + env.got = (const unsigned char *)GOT; \ + env.size = SIZE; \ + GREATEST_ASSERT_EQUAL_Tm(MSG, env.exp, env.got, \ + &greatest_type_info_memory, &env); \ + } while (0) \ + +/* Fail if EXP is not equal to GOT, according to a comparison + * callback in TYPE_INFO. 
If they are not equal, optionally use a + * print callback in TYPE_INFO to print them. */ +#define GREATEST_ASSERT_EQUAL_Tm(MSG, EXP, GOT, TYPE_INFO, UDATA) \ + do { \ + greatest_type_info *type_info = (TYPE_INFO); \ + greatest_info.assertions++; \ + if (!greatest_do_assert_equal_t(EXP, GOT, \ + type_info, UDATA)) { \ + if (type_info == NULL || type_info->equal == NULL) { \ + GREATEST_FAILm("type_info->equal callback missing!"); \ + } else { \ + GREATEST_FAILm(MSG); \ + } \ + } \ + } while (0) \ + +/* Pass. */ +#define GREATEST_PASSm(MSG) \ + do { \ + greatest_info.msg = MSG; \ + return GREATEST_TEST_RES_PASS; \ + } while (0) + +/* Fail. */ +#define GREATEST_FAILm(MSG) \ + do { \ + greatest_info.fail_file = __FILE__; \ + greatest_info.fail_line = __LINE__; \ + greatest_info.msg = MSG; \ + return GREATEST_TEST_RES_FAIL; \ + } while (0) + +/* Optional GREATEST_FAILm variant that longjmps. */ +#if GREATEST_USE_LONGJMP +#define GREATEST_FAIL_WITH_LONGJMP() GREATEST_FAIL_WITH_LONGJMPm(NULL) +#define GREATEST_FAIL_WITH_LONGJMPm(MSG) \ + do { \ + greatest_info.fail_file = __FILE__; \ + greatest_info.fail_line = __LINE__; \ + greatest_info.msg = MSG; \ + longjmp(greatest_info.jump_dest, GREATEST_TEST_RES_FAIL); \ + } while (0) +#endif + +/* Skip the current test. */ +#define GREATEST_SKIPm(MSG) \ + do { \ + greatest_info.msg = MSG; \ + return GREATEST_TEST_RES_SKIP; \ + } while (0) + +/* Check the result of a subfunction using ASSERT, etc. */ +#define GREATEST_CHECK_CALL(RES) \ + do { \ + enum greatest_test_res greatest_RES = RES; \ + if (greatest_RES != GREATEST_TEST_RES_PASS) { \ + return greatest_RES; \ + } \ + } while (0) \ + +#if GREATEST_USE_TIME +#define GREATEST_SET_TIME(NAME) \ + NAME = clock(); \ + if (NAME == (clock_t) -1) { \ + GREATEST_FPRINTF(GREATEST_STDOUT, \ + "clock error: %s\n", #NAME); \ + exit(EXIT_FAILURE); \ + } + +#define GREATEST_CLOCK_DIFF(C1, C2) \ + GREATEST_FPRINTF(GREATEST_STDOUT, " (%lu ticks, %.3f sec)", \ + (long unsigned int) (C2) - (long unsigned int)(C1), \ + (double)((C2) - (C1)) / (1.0 * (double)CLOCKS_PER_SEC)) +#else +#define GREATEST_SET_TIME(UNUSED) +#define GREATEST_CLOCK_DIFF(UNUSED1, UNUSED2) +#endif + +#if GREATEST_USE_LONGJMP +#define GREATEST_SAVE_CONTEXT() \ + /* setjmp returns 0 (GREATEST_TEST_RES_PASS) on first call * \ + * so the test runs, then RES_FAIL from FAIL_WITH_LONGJMP. */ \ + ((enum greatest_test_res)(setjmp(greatest_info.jump_dest))) +#else +#define GREATEST_SAVE_CONTEXT() \ + /*a no-op, since setjmp/longjmp aren't being used */ \ + GREATEST_TEST_RES_PASS +#endif + +/* Run every suite / test function run within BODY in pseudo-random + * order, seeded by SEED. (The top 3 bits of the seed are ignored.) + * + * This should be called like: + * GREATEST_SHUFFLE_TESTS(seed, { + * GREATEST_RUN_TEST(some_test); + * GREATEST_RUN_TEST(some_other_test); + * GREATEST_RUN_TEST(yet_another_test); + * }); + * + * Note that the body of the second argument will be evaluated + * multiple times. 
*/ +#define GREATEST_SHUFFLE_SUITES(SD, BODY) GREATEST_SHUFFLE(0, SD, BODY) +#define GREATEST_SHUFFLE_TESTS(SD, BODY) GREATEST_SHUFFLE(1, SD, BODY) +#define GREATEST_SHUFFLE(ID, SD, BODY) \ + do { \ + struct greatest_prng *prng = &greatest_info.prng[ID]; \ + greatest_prng_init_first_pass(ID); \ + do { \ + prng->count = 0; \ + if (prng->initialized) { greatest_prng_step(ID); } \ + BODY; \ + if (!prng->initialized) { \ + if (!greatest_prng_init_second_pass(ID, SD)) { break; } \ + } else if (prng->count_run == prng->count_ceil) { \ + break; \ + } \ + } while (!GREATEST_FAILURE_ABORT()); \ + prng->count_run = prng->random_order = prng->initialized = 0; \ + } while(0) + +/* Include several function definitions in the main test file. */ +#define GREATEST_MAIN_DEFS() \ + \ +/* Is FILTER a subset of NAME? */ \ +static int greatest_name_match(const char *name, const char *filter, \ + int res_if_none) { \ + size_t offset = 0; \ + size_t filter_len = filter ? strlen(filter) : 0; \ + if (filter_len == 0) { return res_if_none; } /* no filter */ \ + while (name[offset] != '\0') { \ + if (name[offset] == filter[0]) { \ + if (0 == strncmp(&name[offset], filter, filter_len)) { \ + return 1; \ + } \ + } \ + offset++; \ + } \ + \ + return 0; \ +} \ + \ +/* Before running a test, check the name filtering and \ + * test shuffling state, if applicable, and then call setup hooks. */ \ +int greatest_test_pre(const char *name) { \ + struct greatest_run_info *g = &greatest_info; \ + int match = greatest_name_match(name, g->test_filter, 1) && \ + !greatest_name_match(name, g->test_exclude, 0); \ + if (GREATEST_LIST_ONLY()) { /* just listing test names */ \ + if (match) { fprintf(GREATEST_STDOUT, " %s\n", name); } \ + return 0; \ + } \ + if (match && (!GREATEST_FIRST_FAIL() || g->suite.failed == 0)) { \ + struct greatest_prng *p = &g->prng[1]; \ + if (p->random_order) { \ + p->count++; \ + if (!p->initialized || ((p->count - 1) != p->state)) { \ + return 0; /* don't run this test yet */ \ + } \ + } \ + GREATEST_SET_TIME(g->suite.pre_test); \ + if (g->setup) { g->setup(g->setup_udata); } \ + p->count_run++; \ + return 1; /* test should be run */ \ + } else { \ + return 0; /* skipped */ \ + } \ +} \ + \ +void greatest_test_post(const char *name, int res) { \ + GREATEST_SET_TIME(greatest_info.suite.post_test); \ + if (greatest_info.teardown) { \ + void *udata = greatest_info.teardown_udata; \ + greatest_info.teardown(udata); \ + } \ + \ + if (res <= GREATEST_TEST_RES_FAIL) { \ + greatest_do_fail(name); \ + } else if (res >= GREATEST_TEST_RES_SKIP) { \ + greatest_do_skip(name); \ + } else if (res == GREATEST_TEST_RES_PASS) { \ + greatest_do_pass(name); \ + } \ + greatest_info.suite.tests_run++; \ + greatest_info.col++; \ + if (GREATEST_IS_VERBOSE()) { \ + GREATEST_CLOCK_DIFF(greatest_info.suite.pre_test, \ + greatest_info.suite.post_test); \ + GREATEST_FPRINTF(GREATEST_STDOUT, "\n"); \ + } else if (greatest_info.col % greatest_info.width == 0) { \ + GREATEST_FPRINTF(GREATEST_STDOUT, "\n"); \ + greatest_info.col = 0; \ + } \ + fflush(GREATEST_STDOUT); \ +} \ + \ +static void report_suite(void) { \ + if (greatest_info.suite.tests_run > 0) { \ + GREATEST_FPRINTF(GREATEST_STDOUT, \ + "\n%u test%s - %u passed, %u failed, %u skipped", \ + greatest_info.suite.tests_run, \ + greatest_info.suite.tests_run == 1 ? 
"" : "s", \ + greatest_info.suite.passed, \ + greatest_info.suite.failed, \ + greatest_info.suite.skipped); \ + GREATEST_CLOCK_DIFF(greatest_info.suite.pre_suite, \ + greatest_info.suite.post_suite); \ + GREATEST_FPRINTF(GREATEST_STDOUT, "\n"); \ + } \ +} \ + \ +static void update_counts_and_reset_suite(void) { \ + greatest_info.setup = NULL; \ + greatest_info.setup_udata = NULL; \ + greatest_info.teardown = NULL; \ + greatest_info.teardown_udata = NULL; \ + greatest_info.passed += greatest_info.suite.passed; \ + greatest_info.failed += greatest_info.suite.failed; \ + greatest_info.skipped += greatest_info.suite.skipped; \ + greatest_info.tests_run += greatest_info.suite.tests_run; \ + memset(&greatest_info.suite, 0, sizeof(greatest_info.suite)); \ + greatest_info.col = 0; \ +} \ + \ +int greatest_suite_pre(const char *suite_name) { \ + struct greatest_prng *p = &greatest_info.prng[0]; \ + if (!greatest_name_match(suite_name, greatest_info.suite_filter, 1) \ + || (GREATEST_FIRST_FAIL() && greatest_info.failed > 0)) { \ + return 0; \ + } \ + if (p->random_order) { \ + p->count++; \ + if (!p->initialized || ((p->count - 1) != p->state)) { \ + return 0; /* don't run this suite yet */ \ + } \ + } \ + p->count_run++; \ + update_counts_and_reset_suite(); \ + GREATEST_FPRINTF(GREATEST_STDOUT, "\n* Suite %s:\n", suite_name); \ + GREATEST_SET_TIME(greatest_info.suite.pre_suite); \ + return 1; \ +} \ + \ +void greatest_suite_post(void) { \ + GREATEST_SET_TIME(greatest_info.suite.post_suite); \ + report_suite(); \ +} \ + \ +static void greatest_run_suite(greatest_suite_cb *suite_cb, \ + const char *suite_name) { \ + if (greatest_suite_pre(suite_name)) { \ + suite_cb(); \ + greatest_suite_post(); \ + } \ +} \ + \ +void greatest_do_pass(const char *name) { \ + if (GREATEST_IS_VERBOSE()) { \ + GREATEST_FPRINTF(GREATEST_STDOUT, "PASS %s: %s", \ + name, greatest_info.msg ? greatest_info.msg : ""); \ + } else { \ + GREATEST_FPRINTF(GREATEST_STDOUT, "."); \ + } \ + greatest_info.suite.passed++; \ +} \ + \ +void greatest_do_fail(const char *name) { \ + if (GREATEST_IS_VERBOSE()) { \ + GREATEST_FPRINTF(GREATEST_STDOUT, \ + "FAIL %s: %s (%s:%u)", \ + name, greatest_info.msg ? greatest_info.msg : "", \ + greatest_info.fail_file, greatest_info.fail_line); \ + } else { \ + GREATEST_FPRINTF(GREATEST_STDOUT, "F"); \ + greatest_info.col++; \ + /* add linebreak if in line of '.'s */ \ + if (greatest_info.col != 0) { \ + GREATEST_FPRINTF(GREATEST_STDOUT, "\n"); \ + greatest_info.col = 0; \ + } \ + GREATEST_FPRINTF(GREATEST_STDOUT, "FAIL %s: %s (%s:%u)\n", \ + name, \ + greatest_info.msg ? greatest_info.msg : "", \ + greatest_info.fail_file, greatest_info.fail_line); \ + } \ + greatest_info.suite.failed++; \ +} \ + \ +void greatest_do_skip(const char *name) { \ + if (GREATEST_IS_VERBOSE()) { \ + GREATEST_FPRINTF(GREATEST_STDOUT, "SKIP %s: %s", \ + name, \ + greatest_info.msg ? 
\ + greatest_info.msg : "" ); \ + } else { \ + GREATEST_FPRINTF(GREATEST_STDOUT, "s"); \ + } \ + greatest_info.suite.skipped++; \ +} \ + \ +int greatest_do_assert_equal_t(const void *exp, const void *got, \ + greatest_type_info *type_info, void *udata) { \ + int eq = 0; \ + if (type_info == NULL || type_info->equal == NULL) { \ + return 0; \ + } \ + eq = type_info->equal(exp, got, udata); \ + if (!eq) { \ + if (type_info->print != NULL) { \ + GREATEST_FPRINTF(GREATEST_STDOUT, "\nExpected: "); \ + (void)type_info->print(exp, udata); \ + GREATEST_FPRINTF(GREATEST_STDOUT, "\n Got: "); \ + (void)type_info->print(got, udata); \ + GREATEST_FPRINTF(GREATEST_STDOUT, "\n"); \ + } else { \ + GREATEST_FPRINTF(GREATEST_STDOUT, \ + "GREATEST_ASSERT_EQUAL_T failure at %s:%u\n", \ + greatest_info.fail_file, \ + greatest_info.fail_line); \ + } \ + } \ + return eq; \ +} \ + \ +void greatest_usage(const char *name) { \ + GREATEST_FPRINTF(GREATEST_STDOUT, \ + "Usage: %s [--help] [-hlfv] [-s SUITE] [-t TEST]\n" \ + " -h, --help print this Help\n" \ + " -l List suites and tests, then exit (dry run)\n" \ + " -f Stop runner after first failure\n" \ + " -v Verbose output\n" \ + " -s SUITE only run suites containing string SUITE\n" \ + " -t TEST only run tests containing string TEST\n" \ + " -x EXCLUDE exclude tests containing string EXCLUDE\n", \ + name); \ +} \ + \ +static void greatest_parse_options(int argc, char **argv) { \ + int i = 0; \ + for (i = 1; i < argc; i++) { \ + if (argv[i][0] == '-') { \ + char f = argv[i][1]; \ + if ((f == 's' || f == 't' || f == 'x') && argc <= i + 1) { \ + greatest_usage(argv[0]); exit(EXIT_FAILURE); \ + } \ + switch (f) { \ + case 's': /* suite name filter */ \ + greatest_set_suite_filter(argv[i + 1]); i++; break; \ + case 't': /* test name filter */ \ + greatest_set_test_filter(argv[i + 1]); i++; break; \ + case 'x': /* test name exclusion */ \ + greatest_set_test_exclude(argv[i + 1]); i++; break; \ + case 'f': /* first fail flag */ \ + greatest_stop_at_first_fail(); break; \ + case 'l': /* list only (dry run) */ \ + greatest_info.flags |= GREATEST_FLAG_LIST_ONLY; break; \ + case 'v': /* first fail flag */ \ + greatest_info.verbosity++; break; \ + case 'h': /* help */ \ + greatest_usage(argv[0]); exit(EXIT_SUCCESS); \ + case '-': \ + if (0 == strncmp("--help", argv[i], 6)) { \ + greatest_usage(argv[0]); exit(EXIT_SUCCESS); \ + } else if (0 == strncmp("--", argv[i], 2)) { \ + return; /* ignore following arguments */ \ + } /* fall through */ \ + default: \ + GREATEST_FPRINTF(GREATEST_STDOUT, \ + "Unknown argument '%s'\n", argv[i]); \ + greatest_usage(argv[0]); \ + exit(EXIT_FAILURE); \ + } \ + } \ + } \ +} \ + \ +int greatest_all_passed(void) { return (greatest_info.failed == 0); } \ + \ +void greatest_set_test_filter(const char *filter) { \ + greatest_info.test_filter = filter; \ +} \ + \ +void greatest_set_test_exclude(const char *filter) { \ + greatest_info.test_exclude = filter; \ +} \ + \ +void greatest_set_suite_filter(const char *filter) { \ + greatest_info.suite_filter = filter; \ +} \ + \ +void greatest_stop_at_first_fail(void) { \ + greatest_info.flags |= GREATEST_FLAG_FIRST_FAIL; \ +} \ + \ +void greatest_get_report(struct greatest_report_t *report) { \ + if (report) { \ + report->passed = greatest_info.passed; \ + report->failed = greatest_info.failed; \ + report->skipped = greatest_info.skipped; \ + report->assertions = greatest_info.assertions; \ + } \ +} \ + \ +unsigned int greatest_get_verbosity(void) { \ + return greatest_info.verbosity; \ +} \ + \ +void 
greatest_set_verbosity(unsigned int verbosity) { \ + greatest_info.verbosity = (unsigned char)verbosity; \ +} \ + \ +void greatest_set_flag(greatest_flag_t flag) { \ + greatest_info.flags |= flag; \ +} \ + \ +void GREATEST_SET_SETUP_CB(greatest_setup_cb *cb, void *udata) { \ + greatest_info.setup = cb; \ + greatest_info.setup_udata = udata; \ +} \ + \ +void GREATEST_SET_TEARDOWN_CB(greatest_teardown_cb *cb, \ + void *udata) { \ + greatest_info.teardown = cb; \ + greatest_info.teardown_udata = udata; \ +} \ + \ +static int greatest_string_equal_cb(const void *exp, const void *got, \ + void *udata) { \ + size_t *size = (size_t *)udata; \ + return (size != NULL \ + ? (0 == strncmp((const char *)exp, (const char *)got, *size)) \ + : (0 == strcmp((const char *)exp, (const char *)got))); \ +} \ + \ +static int greatest_string_printf_cb(const void *t, void *udata) { \ + (void)udata; /* note: does not check \0 termination. */ \ + return GREATEST_FPRINTF(GREATEST_STDOUT, "%s", (const char *)t); \ +} \ + \ +greatest_type_info greatest_type_info_string = { \ + greatest_string_equal_cb, \ + greatest_string_printf_cb, \ +}; \ + \ +static int greatest_memory_equal_cb(const void *exp, const void *got, \ + void *udata) { \ + greatest_memory_cmp_env *env = (greatest_memory_cmp_env *)udata; \ + return (0 == memcmp(exp, got, env->size)); \ +} \ + \ +/* Hexdump raw memory, with differences highlighted */ \ +static int greatest_memory_printf_cb(const void *t, void *udata) { \ + greatest_memory_cmp_env *env = (greatest_memory_cmp_env *)udata; \ + const unsigned char *buf = (const unsigned char *)t; \ + unsigned char diff_mark = ' '; \ + FILE *out = GREATEST_STDOUT; \ + size_t i, line_i, line_len = 0; \ + int len = 0; /* format hexdump with differences highlighted */ \ + for (i = 0; i < env->size; i+= line_len) { \ + diff_mark = ' '; \ + line_len = env->size - i; \ + if (line_len > 16) { line_len = 16; } \ + for (line_i = i; line_i < i + line_len; line_i++) { \ + if (env->exp[line_i] != env->got[line_i]) diff_mark = 'X'; \ + } \ + len += GREATEST_FPRINTF(out, "\n%04x %c ", \ + (unsigned int)i, diff_mark); \ + for (line_i = i; line_i < i + line_len; line_i++) { \ + int m = env->exp[line_i] == env->got[line_i]; /* match? */ \ + len += GREATEST_FPRINTF(out, "%02x%c", \ + buf[line_i], m ? ' ' : '<'); \ + } \ + for (line_i = 0; line_i < 16 - line_len; line_i++) { \ + len += GREATEST_FPRINTF(out, " "); \ + } \ + GREATEST_FPRINTF(out, " "); \ + for (line_i = i; line_i < i + line_len; line_i++) { \ + unsigned char c = buf[line_i]; \ + len += GREATEST_FPRINTF(out, "%c", isprint(c) ? c : '.'); \ + } \ + } \ + len += GREATEST_FPRINTF(out, "\n"); \ + return len; \ +} \ + \ +void greatest_prng_init_first_pass(int id) { \ + greatest_info.prng[id].random_order = 1; \ + greatest_info.prng[id].count_run = 0; \ +} \ + \ +int greatest_prng_init_second_pass(int id, unsigned long seed) { \ + static unsigned long primes[] = { 11, 101, 1009, 10007, \ + 100003, 1000003, 10000019, 100000007, 1000000007, \ + 1538461, 1865471, 17471, 2147483647 /* 2**32 - 1 */, }; \ + struct greatest_prng *prng = &greatest_info.prng[id]; \ + if (prng->count == 0) { return 0; } \ + prng->mod = 1; \ + prng->count_ceil = prng->count; \ + while (prng->mod < prng->count) { prng->mod <<= 1; } \ + prng->state = seed & 0x1fffffff; /* only use lower 29 bits... 
*/ \ + prng->a = (4LU * prng->state) + 1; /* to avoid overflow */ \ + prng->c = primes[(seed * 16451) % sizeof(primes)/sizeof(primes[0])];\ + prng->initialized = 1; \ + return 1; \ +} \ + \ +/* Step the pseudorandom number generator until its state reaches \ + * another test ID between 0 and the test count. \ + * This use a linear congruential pseudorandom number generator, \ + * with the power-of-two ceiling of the test count as the modulus, the \ + * masked seed as the multiplier, and a prime as the increment. For \ + * each generated value < the test count, run the corresponding test. \ + * This will visit all IDs 0 <= X < mod once before repeating, \ + * with a starting position chosen based on the initial seed. \ + * For details, see: Knuth, The Art of Computer Programming \ + * Volume. 2, section 3.2.1. */ \ +void greatest_prng_step(int id) { \ + struct greatest_prng *p = &greatest_info.prng[id]; \ + do { \ + p->state = ((p->a * p->state) + p->c) & (p->mod - 1); \ + } while (p->state >= p->count_ceil); \ +} \ + \ +greatest_type_info greatest_type_info_memory = { \ + greatest_memory_equal_cb, \ + greatest_memory_printf_cb, \ +}; \ + \ +greatest_run_info greatest_info + +/* Init internals. */ +#define GREATEST_INIT() \ + do { \ + /* Suppress unused function warning if features aren't used */ \ + (void)greatest_run_suite; \ + (void)greatest_parse_options; \ + (void)greatest_prng_step; \ + (void)greatest_prng_init_first_pass; \ + (void)greatest_prng_init_second_pass; \ + \ + memset(&greatest_info, 0, sizeof(greatest_info)); \ + greatest_info.width = GREATEST_DEFAULT_WIDTH; \ + GREATEST_SET_TIME(greatest_info.begin); \ + } while (0) \ + +/* Handle command-line arguments, etc. */ +#define GREATEST_MAIN_BEGIN() \ + do { \ + GREATEST_INIT(); \ + greatest_parse_options(argc, argv); \ + } while (0) + +/* Report passes, failures, skipped tests, the number of + * assertions, and the overall run time. */ +#define GREATEST_PRINT_REPORT() \ + do { \ + if (!GREATEST_LIST_ONLY()) { \ + update_counts_and_reset_suite(); \ + GREATEST_SET_TIME(greatest_info.end); \ + GREATEST_FPRINTF(GREATEST_STDOUT, \ + "\nTotal: %u test%s", \ + greatest_info.tests_run, \ + greatest_info.tests_run == 1 ? "" : "s"); \ + GREATEST_CLOCK_DIFF(greatest_info.begin, \ + greatest_info.end); \ + GREATEST_FPRINTF(GREATEST_STDOUT, ", %u assertion%s\n", \ + greatest_info.assertions, \ + greatest_info.assertions == 1 ? "" : "s"); \ + GREATEST_FPRINTF(GREATEST_STDOUT, \ + "Pass: %u, fail: %u, skip: %u.\n", \ + greatest_info.passed, \ + greatest_info.failed, greatest_info.skipped); \ + } \ + } while (0) + +/* Report results, exit with exit status based on results. */ +#define GREATEST_MAIN_END() \ + do { \ + GREATEST_PRINT_REPORT(); \ + return (greatest_all_passed() ? EXIT_SUCCESS : EXIT_FAILURE); \ + } while (0) + +/* Make abbreviations without the GREATEST_ prefix for the + * most commonly used symbols. 
*/
+#if GREATEST_USE_ABBREVS
+#define TEST GREATEST_TEST
+#define SUITE GREATEST_SUITE
+#define SUITE_EXTERN GREATEST_SUITE_EXTERN
+#define RUN_TEST GREATEST_RUN_TEST
+#define RUN_TEST1 GREATEST_RUN_TEST1
+#define RUN_SUITE GREATEST_RUN_SUITE
+#define IGNORE_TEST GREATEST_IGNORE_TEST
+#define ASSERT GREATEST_ASSERT
+#define ASSERTm GREATEST_ASSERTm
+#define ASSERT_FALSE GREATEST_ASSERT_FALSE
+#define ASSERT_EQ GREATEST_ASSERT_EQ
+#define ASSERT_EQ_FMT GREATEST_ASSERT_EQ_FMT
+#define ASSERT_IN_RANGE GREATEST_ASSERT_IN_RANGE
+#define ASSERT_EQUAL_T GREATEST_ASSERT_EQUAL_T
+#define ASSERT_STR_EQ GREATEST_ASSERT_STR_EQ
+#define ASSERT_STRN_EQ GREATEST_ASSERT_STRN_EQ
+#define ASSERT_MEM_EQ GREATEST_ASSERT_MEM_EQ
+#define ASSERT_ENUM_EQ GREATEST_ASSERT_ENUM_EQ
+#define ASSERT_FALSEm GREATEST_ASSERT_FALSEm
+#define ASSERT_EQm GREATEST_ASSERT_EQm
+#define ASSERT_EQ_FMTm GREATEST_ASSERT_EQ_FMTm
+#define ASSERT_IN_RANGEm GREATEST_ASSERT_IN_RANGEm
+#define ASSERT_EQUAL_Tm GREATEST_ASSERT_EQUAL_Tm
+#define ASSERT_STR_EQm GREATEST_ASSERT_STR_EQm
+#define ASSERT_STRN_EQm GREATEST_ASSERT_STRN_EQm
+#define ASSERT_MEM_EQm GREATEST_ASSERT_MEM_EQm
+#define ASSERT_ENUM_EQm GREATEST_ASSERT_ENUM_EQm
+#define PASS GREATEST_PASS
+#define FAIL GREATEST_FAIL
+#define SKIP GREATEST_SKIP
+#define PASSm GREATEST_PASSm
+#define FAILm GREATEST_FAILm
+#define SKIPm GREATEST_SKIPm
+#define SET_SETUP GREATEST_SET_SETUP_CB
+#define SET_TEARDOWN GREATEST_SET_TEARDOWN_CB
+#define CHECK_CALL GREATEST_CHECK_CALL
+#define SHUFFLE_TESTS GREATEST_SHUFFLE_TESTS
+#define SHUFFLE_SUITES GREATEST_SHUFFLE_SUITES
+
+#ifdef GREATEST_VA_ARGS
+#define RUN_TESTp GREATEST_RUN_TESTp
+#endif
+
+#if GREATEST_USE_LONGJMP
+#define ASSERT_OR_LONGJMP GREATEST_ASSERT_OR_LONGJMP
+#define ASSERT_OR_LONGJMPm GREATEST_ASSERT_OR_LONGJMPm
+#define FAIL_WITH_LONGJMP GREATEST_FAIL_WITH_LONGJMP
+#define FAIL_WITH_LONGJMPm GREATEST_FAIL_WITH_LONGJMPm
+#endif
+
+#endif /* USE_ABBREVS */
+
+#if defined(__cplusplus) && !defined(GREATEST_NO_EXTERN_CPLUSPLUS)
+}
+#endif
+
+#endif
diff --git a/tests/pymodbus_client.py b/tests/pymodbus_client.py
new file mode 100755
index 0000000..85d5413
--- /dev/null
+++ b/tests/pymodbus_client.py
@@ -0,0 +1,15 @@
+#!/usr/bin/env python
+#https://pymodbus.readthedocs.io/en/latest/examples/synchronous-client.html
+
+#---------------------------------------------------------------------------#
+# import the various client implementations
+#---------------------------------------------------------------------------#
+from pymodbus.client.sync import ModbusTcpClient
+from pymodbus.client.sync import ModbusUdpClient
+from pymodbus.client.sync import ModbusSerialClient
+
+#with ModbusSerialClient(method='rtu', port='/tmp/pts1', timeout=1, baudrate=19200) as client:
+PORT_WITHOUT_ROOT_REQ = 1025
+with ModbusTcpClient('127.0.0.1', port=PORT_WITHOUT_ROOT_REQ) as client:
+    result = client.read_holding_registers(1, 8, unit=1)
+    print(result.registers)
\ No newline at end of file
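
The environment pieces above are meant to be used together: `socat_runner.sh` creates a linked pseudo-terminal pair at `/tmp/pts0` and `/tmp/pts1`, `modbus_server_runner.sh` starts the pymodbus RTU mock on the PTY behind `/tmp/pts1`, and both keep PID files under `/tmp` so they can be torn down again. A minimal bring-up and tear-down sequence might look like the following sketch, assuming socat and pymodbus are installed and the commands are run from `tests/environment`:

```sh
cd tests/environment

# create the linked PTY pair /tmp/pts0 <-> /tmp/pts1 (PID recorded in /tmp/socat.pid)
./socat_runner.sh up

# start the pymodbus RTU mock server; it resolves its serial port via readlink('/tmp/pts1')
./modbus_server_runner.sh up

# poll the mock; note that as committed the client uses ModbusTcpClient on
# 127.0.0.1:1025 and its ModbusSerialClient line for the PTY is commented out
python ../pymodbus_client.py

# stop the server and remove the PTY pair again
./modbus_server_runner.sh down
./socat_runner.sh down
```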
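Both runner scripts lean on `subprocess_helper.sh`: `run_cmd_save_pid` refuses to start a second instance while the PID file still exists, and `kill_pid` is a no-op once the PID file is gone, which is what makes repeated `up`/`down` calls safe. A standalone sketch of that behaviour, using a placeholder command (`sleep 300`) and PID file path that are not part of this change:

```sh
cd tests/environment
. ./subprocess_helper.sh

# first call starts the background job and writes its PID to /tmp/demo.pid;
# the second call sees the existing PID file and returns 1 without starting anything
run_cmd_save_pid "sleep 300 &" /tmp/demo.pid
run_cmd_save_pid "sleep 300 &" /tmp/demo.pid

# kill the job and remove the PID file; a repeated call simply returns 0
kill_pid /tmp/demo.pid
kill_pid /tmp/demo.pid
```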