From 3d537f9645492b958c7a5eab98d660685f339244 Mon Sep 17 00:00:00 2001 From: Andrew Bettison Date: Wed, 28 May 2014 17:40:47 +0930 Subject: [PATCH] Improve test framework: test coverage support --- .gitignore | 15 +-- INSTALL.md | 44 ++++--- Makefile.in | 144 +++++++++++++++-------- commandline.c | 48 +++++--- doc/README.md | 6 + doc/Testing.md | 294 +++++++++++++++++++++++++++++++++++++++++++++++ log.c | 9 +- testframework.sh | 142 ++++++++++++++++++++++- 8 files changed, 611 insertions(+), 91 deletions(-) create mode 100644 doc/Testing.md diff --git a/.gitignore b/.gitignore index 9d784755..aba71bd8 100644 --- a/.gitignore +++ b/.gitignore @@ -10,16 +10,15 @@ *.suo .*.sw? VERSION.txt -configure -autom4te.cache +/configure +/autom4te.cache Makefile testconfig.sh config.log config.status -nacl/nacl-20110221/build -nacl/naclinc.txt -nacl/nacllib.txt -serval.c +/nacl/nacl-20110221/build +/nacl/naclinc.txt +/nacl/nacllib.txt /servald /directory_service /tfw_createfile @@ -28,4 +27,6 @@ serval.c /simulator *.so test.*.log -testlog +/testlog +/coverage_html +*.info diff --git a/INSTALL.md b/INSTALL.md index 46f51925..cfa8a672 100644 --- a/INSTALL.md +++ b/INSTALL.md @@ -1,6 +1,6 @@ Serval DNA Build and Test ========================= -[Serval Project], March 2013 +[Serval Project][], March 2013 Supported Architectures ----------------------- @@ -64,7 +64,8 @@ Test dependencies: Build ----- -To compile Serval DNA from source, run the following commands: +To compile a native (ie, not cross-compiled) Serval DNA from source, run the +following commands: $ cd $HOME/src/serval-dna $ autoreconf -f -i @@ -93,6 +94,9 @@ A successful session should appear something like: CC nacl/src/crypto_auth_hmacsha256_ref/verify.c CC nacl/src/crypto_auth_hmacsha512256_ref/hmac.c ... + CC cli.c + CC commandline.c + ... CC xprintf.c LINK servald LINK libmonitorclient.so @@ -117,7 +121,7 @@ Built artifacts The build process produces the following artifacts: -* **servald** is the main executable. +* **servald** is the main Serval DNA executable. * **libservald.so** is a shared library built only for Android, which is linked into the [batphone][] Java executable at run time to provide the [JNI][] @@ -131,17 +135,23 @@ The build process produces the following artifacts: entry points to functions for managing the client end of a monitor connection with the servald daemon. +* **fakeradio** is a utility used by test scripts to simulate the serial + interface to the [RFD900][] packet radio used in the [Serval Mesh Extender][] + +* **simulator** is a utility used by test scripts for simulating wireless + packet transmission under different conditions. + +* **tfw_createfile** is a utility needed by test scripts for creating large + data files with unique, non-repeating content. + +* **config_test** is a utility that will fail to link if any external + dependencies creep into the configuration subsystem. + Test scripts ------------ -The scripts in the [tests](./tests/) directory require [Bash][] version 3.2.48 -or later. To run tests, simply build a native `servald` executable then invoke -the test script. Each test case is executed in its own self-contained -temporary directory with its own set-up and configuration, so there is no need -to configure anything or clean up afterwards. - -For example, the following command runs all the tests except long-running, -resource-hungry “stress” tests: +After building the native `servald` executable, run all the tests with the +following command: $ ./tests/all 1 [PASS.] 
(logging) By default, only errors and warnings are logged to stderr @@ -156,13 +166,12 @@ resource-hungry “stress” tests: 161 tests, 161 pass, 0 fail, 0 error $ -There are options to run tests concurrently for faster results, and to select -subsets of test cases. To see the options, give the `--help` option: +Every test run writes log files into the [testlog/all](./testlog/all/) +directory (relative to the current working directory), deleting any logs from +the previous run. - $ ./tests/all --help - -Every test run writes its log files into the [testlog/all](./testlog/all/) -directory, deleting all logs from the previous run. +See [Serval DNA Testing](./doc/Testing.md) for more information on running and +developing test scripts. Configure --------- @@ -201,6 +210,7 @@ This document is available under the [Creative Commons Attribution 4.0 Internati [gcc 4.7]: http://gcc.gnu.org/gcc-4.7/ [OpenWRT]: ./doc/OpenWRT.md [Serval Mesh Extender]: http://developer.servalproject.org/dokuwiki/doku.php?id=content:meshextender: +[RFD900]: http://rfdesign.com.au/index.php/rfd900 [Mesh Potato]: http://villagetelco.org/mesh-potato/ [Commotion Wireless]: http://commotionwireless.net/ [JNI]: http://en.wikipedia.org/wiki/Java_Native_Interface diff --git a/Makefile.in b/Makefile.in index 340fedb2..d86e5f6d 100644 --- a/Makefile.in +++ b/Makefile.in @@ -1,3 +1,5 @@ +# Makefile.in for Serval DNA +# vim: noet ts=8 sts=0 sw=8 prefix=@prefix@ exec_prefix=@exec_prefix@ bindir=@bindir@ @@ -10,27 +12,35 @@ include $(NACL_BASE)/nacl.mk include sourcefiles.mk include headerfiles.mk +OBJSDIR_SERVALD = objs_servald +OBJSDIR_LIB = objs_lib +OBJSDIR_TOOLS = objs + +OBJSDIRS = $(OBJSDIR_SERVALD) $(OBJSDIR_LIB) $(OBJSDIR_TOOLS) + SERVAL_DAEMON_OBJS = \ - $(addprefix objs_servald/, $(SERVAL_CLIENT_SOURCES:.c=.o)) \ - $(addprefix objs_servald/, $(MDP_CLIENT_SOURCES:.c=.o)) \ - $(addprefix objs_servald/, $(SERVAL_DAEMON_SOURCES:.c=.o)) + $(addprefix $(OBJSDIR_SERVALD)/, $(SERVAL_CLIENT_SOURCES:.c=.o)) \ + $(addprefix $(OBJSDIR_SERVALD)/, $(MDP_CLIENT_SOURCES:.c=.o)) \ + $(addprefix $(OBJSDIR_SERVALD)/, $(SERVAL_DAEMON_SOURCES:.c=.o)) SERVALD_OBJS = \ - $(addprefix objs_servald/, $(notdir $(SQLITE3_SOURCES:.c=.o))) \ - $(addprefix objs_servald/, $(NACL_SOURCES:.c=.o)) \ - $(SERVAL_DAEMON_OBJS) + $(addprefix $(OBJSDIR_SERVALD)/, $(notdir $(SQLITE3_SOURCES:.c=.o))) \ + $(addprefix $(OBJSDIR_SERVALD)/, $(NACL_SOURCES:.c=.o)) \ + $(SERVAL_DAEMON_OBJS) LIB_SERVAL_OBJS = \ - $(addprefix objs_lib/, $(SERVAL_CLIENT_SOURCES:.c=.o)) \ - $(addprefix objs_lib/, $(SERVAL_LIB_SOURCES:.c=.o)) \ - $(addprefix objs_lib/, $(MDP_CLIENT_SOURCES:.c=.o)) + $(addprefix $(OBJSDIR_LIB)/, $(SERVAL_CLIENT_SOURCES:.c=.o)) \ + $(addprefix $(OBJSDIR_LIB)/, $(SERVAL_LIB_SOURCES:.c=.o)) \ + $(addprefix $(OBJSDIR_LIB)/, $(MDP_CLIENT_SOURCES:.c=.o)) MONITOR_CLIENT_OBJS = \ - $(addprefix objs_lib/, $(SERVAL_CLIENT_SOURCES:.c=.o)) \ - $(addprefix objs_lib/, $(MONITOR_CLIENT_SRCS:.c=.o)) + $(addprefix $(OBJSDIR_LIB)/, $(SERVAL_CLIENT_SOURCES:.c=.o)) \ + $(addprefix $(OBJSDIR_LIB)/, $(MONITOR_CLIENT_SRCS:.c=.o)) SIMULATOR_OBJS = \ - $(addprefix objs/, $(SIMULATOR_SOURCES:.c=.o)) + $(addprefix $(OBJSDIR_TOOLS)/, $(SIMULATOR_SOURCES:.c=.o)) + +CC= @CC@ LDFLAGS=@LDFLAGS@ @LIBS@ @PTHREAD_LIBS@ -CFLAGS= -Isqlite-amalgamation-3070900 @CPPFLAGS@ @CFLAGS@ @PTHREAD_CFLAGS@ -Inacl/include +CFLAGS= -Isqlite-amalgamation-3070900 @CPPFLAGS@ @CFLAGS@ @PTHREAD_CFLAGS@ -Inacl/include CFLAGS+=-DSYSCONFDIR="\"$(sysconfdir)\"" -DLOCALSTATEDIR="\"$(localstatedir)\"" CFLAGS+=-fPIC 
CFLAGS+=-Wall -Wno-unused-value -Werror @@ -58,41 +68,81 @@ all: servald libmonitorclient.so libmonitorclient.a test test: tfw_createfile directory_service fakeradio config_test simulator +covzero: | is_built_with_coverage + @echo REMOVE all .gcda files + @find $(OBJSDIRS) -type f -name '*.gcda' -print0 | xargs -0 $(RM) + +covinit: servald-initial.info + +covhtml: coverage_html/index.html + +is_built_with_coverage: + @for obj in $(SERVALD_OBJS); do \ + gcno="$${obj%.o}.gcno" ;\ + if [ ! -r "$$gcno" ]; then \ + echo "ERROR: servald has not been compiled for code coverage; missing $$gcno" ;\ + exit 1 ;\ + fi ;\ + done + +has_coverage_data: | is_built_with_coverage + @count=0; for obj in $(SERVALD_OBJS); do \ + gcda="$${obj%.o}.gcda" ;\ + [ -s "$$gcda" ] && count=$$(($$count + 1)) ;\ + done ;\ + if [ $$count -eq 0 ]; then \ + echo "ERROR: no code coverage data; run some tests" ;\ + exit 1 ;\ + fi + +servald-initial.info: Makefile servald | is_built_with_coverage + geninfo --quiet --initial --checksum --base-directory=$(abspath .) --no-external $(OBJSDIR_SERVALD) -o $@ + +servald-coverage.info: Makefile servald $(shell find $(OBJSDIR_SERVALD) -type f -name '*.gcda') | has_coverage_data + geninfo --quiet --checksum --base-directory=$(abspath .) --no-external $(OBJSDIR_SERVALD) -o $@ 2>&1 | { grep -v 'WARNING: no data found for .*\.h$$' || true; } + @[ -s $@ ] + +coverage_html/index.html: Makefile servald-initial.info servald-coverage.info + $(RM) -r coverage_html + genhtml --quiet servald-initial.info servald-coverage.info -o coverage_html + Makefile: $(wildcard Makefile.in) $(wildcard configure) $(warning Makefile may be out of date, please run ./configure) configure: $(wildcard configure.in) $(warning configure may be out of date, please run autoreconf -f -i) -objs/version.o: $(SERVALD_OBJS) version_string.sh $(wildcard VERSION.txt) COPYRIGHT.txt Makefile +$(OBJSDIR_TOOLS)/version.o: $(SERVALD_OBJS) version_string.sh $(wildcard VERSION.txt) COPYRIGHT.txt Makefile @echo CC version_servald.c + @$(RM) $(@:.o=.gcno) $(@:.o=.gcda) @V=`./version_string.sh --ignore-untracked` \ && C="`sed -e :a -e N -e '$$!ba' -e 's/[\\\\"]/\\\\&/g' -e 's/\\n/\\\\n/g' COPYRIGHT.txt`" \ && $(CC) -c version_servald.c -o $@ -DSERVALD_VERSION="\"$$V\"" -DSERVALD_COPYRIGHT="\"$$C\"" #' <-- fixes vim syntax highlighting -objs_servald/sqlite3.o: sqlite-amalgamation-3070900/sqlite3.c Makefile +$(OBJSDIR_SERVALD)/sqlite3.o: sqlite-amalgamation-3070900/sqlite3.c Makefile @echo SERVALD CC $< @mkdir -p $(dir $@) + @$(RM) $(@:.o=.gcno) $(@:.o=.gcda) @$(CC) $(CFLAGS) $(DEFS) -c $< -o $@ # No object files in source directory! 
%.o: %.c -objs/%.o: %.c +$(OBJSDIR_TOOLS)/%.o: %.c @echo CC $< @mkdir -p $(dir $@) @$(RM) $(@:.o=.gcno) $(@:.o=.gcda) @$(CC) $(CFLAGS) $(DEFS) -c $< -o $@ -objs_servald/%.o: %.c +$(OBJSDIR_SERVALD)/%.o: %.c @echo SERVALD CC $< @mkdir -p $(dir $@) @$(RM) $(@:.o=.gcno) $(@:.o=.gcda) @$(CC) $(CFLAGS) $(DEFS) -c $< -o $@ -objs_lib/%.o: %.c +$(OBJSDIR_LIB)/%.o: %.c @echo LIB CC $< @mkdir -p $(dir $@) @$(RM) $(@:.o=.gcno) $(@:.o=.gcda) @@ -103,62 +153,62 @@ $(SERVALD_OBJS): Makefile $(LIB_SERVAL_OBJS): $(HDRS) Makefile $(MONITOR_CLIENT_OBJS): $(HDRS) Makefile $(SIMULATOR_OBJS): $(HDRS) Makefile -objs/directory_service.o objs/tfw_createfile.o objs/fakeradio.o \ -objs/config_test.o objs/conf_om.o objs/conf_schema.o objs/conf_parse.o libserval.a \ - : $(HDRS) Makefile +$(OBJSDIR_TOOLS)/directory_service.o $(OBJSDIR_TOOLS)/tfw_createfile.o $(OBJSDIR_TOOLS)/fakeradio.o \ +$(OBJSDIR_TOOLS)/config_test.o $(OBJSDIR_TOOLS)/conf_om.o $(OBJSDIR_TOOLS)/conf_schema.o $(OBJSDIR_TOOLS)/conf_parse.o libserval.a \ + : $(HDRS) Makefile -servald: $(SERVALD_OBJS) objs/version.o +servald: $(SERVALD_OBJS) $(OBJSDIR_TOOLS)/version.o @echo LINK $@ - @$(CC) $(CFLAGS) -Wall -o $@ $(SERVALD_OBJS) objs/version.o $(LDFLAGS) + @$(CC) $(CFLAGS) -Wall -o $@ $(SERVALD_OBJS) $(OBJSDIR_TOOLS)/version.o $(LDFLAGS) -libserval.a: $(LIB_SERVAL_OBJS) objs/version.o +libserval.a: $(LIB_SERVAL_OBJS) $(OBJSDIR_TOOLS)/version.o @echo AR $@ - @$(AR) -cr $@ $(LIB_SERVAL_OBJS) objs/version.o + @$(AR) -cr $@ $(LIB_SERVAL_OBJS) $(OBJSDIR_TOOLS)/version.o -directory_service: objs/directory_service.o libserval.a +directory_service: $(OBJSDIR_TOOLS)/directory_service.o libserval.a @echo LINK $@ - @$(CC) $(CFLAGS) -Wall -o $@ objs/directory_service.o libserval.a $(LDFLAGS) + @$(CC) $(CFLAGS) -Wall -o $@ $(OBJSDIR_TOOLS)/directory_service.o libserval.a $(LDFLAGS) -tfw_createfile: objs/tfw_createfile.o libserval.a +tfw_createfile: $(OBJSDIR_TOOLS)/tfw_createfile.o libserval.a @echo LINK $@ - @$(CC) $(CFLAGS) -Wall -o $@ objs/tfw_createfile.o libserval.a $(LDFLAGS) + @$(CC) $(CFLAGS) -Wall -o $@ $(OBJSDIR_TOOLS)/tfw_createfile.o libserval.a $(LDFLAGS) -fakeradio: objs/fakeradio.o libserval.a +fakeradio: $(OBJSDIR_TOOLS)/fakeradio.o libserval.a @echo LINK $@ - @$(CC) $(CFLAGS) -Wall -o $@ objs/fakeradio.o libserval.a $(LDFLAGS) + @$(CC) $(CFLAGS) -Wall -o $@ $(OBJSDIR_TOOLS)/fakeradio.o libserval.a $(LDFLAGS) simulator: $(SIMULATOR_OBJS) libserval.a @echo LINK $@ @$(CC) $(CFLAGS) -Wall -o $@ $(SIMULATOR_OBJS) libserval.a $(LDFLAGS) -config_test: objs/config_test.o objs/conf_om.o objs/conf_schema.o objs/conf_parse.o libserval.a +config_test: $(OBJSDIR_TOOLS)/config_test.o $(OBJSDIR_TOOLS)/conf_om.o $(OBJSDIR_TOOLS)/conf_schema.o $(OBJSDIR_TOOLS)/conf_parse.o libserval.a @echo LINK $@ - @$(CC) $(CFLAGS) -Wall -o $@ objs/config_test.o objs/conf_om.o objs/conf_schema.o objs/conf_parse.o libserval.a $(LDFLAGS) + @$(CC) $(CFLAGS) -Wall -o $@ $(OBJSDIR_TOOLS)/config_test.o $(OBJSDIR_TOOLS)/conf_om.o $(OBJSDIR_TOOLS)/conf_schema.o $(OBJSDIR_TOOLS)/conf_parse.o libserval.a $(LDFLAGS) copyright: @if [ -x "$(COPYRIGHT_TOOL)" ]; then \ - echo GENERATE COPYRIGHT.txt; \ - $(COPYRIGHT_TOOL) -o COPYRIGHT.txt condense *.c *.h; \ - else \ - echo 'sp-copyright-tool is not in $$PATH; COPYRIGHT.txt not updated'; \ - fi + echo GENERATE COPYRIGHT.txt; \ + $(COPYRIGHT_TOOL) -o COPYRIGHT.txt condense *.c *.h; \ + else \ + echo 'sp-copyright-tool is not in $$PATH; COPYRIGHT.txt not updated'; \ + fi findPATH = $(firstword $(wildcard $(addsuffix /$(1),$(subst :, ,$(PATH))))) 
COPYRIGHT_TOOL := $(call findPATH,sp-copyright-tool) # This does not build on 64 bit elf platforms as NaCL isn't built with -fPIC # DOC 20120615 -libserval.so: $(SERVALD_OBJS) objs/version.o +libserval.so: $(SERVALD_OBJS) $(OBJSDIR_TOOLS)/version.o @echo LINK $@ - @$(CC) $(CFLAGS) -Wall -shared -o $@ $(SERVALD_OBJS) objs/version.o $(LDFLAGS) + @$(CC) $(CFLAGS) -Wall -shared -o $@ $(SERVALD_OBJS) $(OBJSDIR_TOOLS)/version.o $(LDFLAGS) -libmonitorclient.so: $(MONITOR_CLIENT_OBJS) objs/version.o +libmonitorclient.so: $(MONITOR_CLIENT_OBJS) $(OBJSDIR_TOOLS)/version.o @echo LINK $@ - @$(CC) $(CFLAGS) -Wall -shared -o $@ $(MONITOR_CLIENT_OBJS) objs/version.o $(LDFLAGS) + @$(CC) $(CFLAGS) -Wall -shared -o $@ $(MONITOR_CLIENT_OBJS) $(OBJSDIR_TOOLS)/version.o $(LDFLAGS) -libmonitorclient.a: $(MONITOR_CLIENT_OBJS) objs/version.o +libmonitorclient.a: $(MONITOR_CLIENT_OBJS) $(OBJSDIR_TOOLS)/version.o @echo AR $@ - @$(AR) -cr $@ $(MONITOR_CLIENT_OBJS) objs/version.o + @$(AR) -cr $@ $(MONITOR_CLIENT_OBJS) $(OBJSDIR_TOOLS)/version.o install: servald $(INSTALL_PROGRAM) -D servald $(DESTDIR)$(sbindir)/servald @@ -167,8 +217,8 @@ uninstall: $(RM) $(DESTDIR)$(sbindir)/servald clean: - @$(RM) -r objs/* objs_servald/* objs_lib/* \ - servald libservald.so \ - libserval.a libmonitorclient.so libmonitorclient.a \ + @$(RM) -r $(OBJSDIRS:%=%/*) \ + servald libservald.so \ + libserval.a libmonitorclient.so libmonitorclient.a \ fakeradio \ tfw_createfile diff --git a/commandline.c b/commandline.c index 2290ba66..905e0733 100644 --- a/commandline.c +++ b/commandline.c @@ -880,32 +880,47 @@ int app_server_start(const struct cli_parsed *parsed, struct cli_context *contex goto exit; case 0: { /* Child process. Fork then exit, to disconnect daemon from parent process, so that - when daemon exits it does not live on as a zombie. N.B. Do not return from within this - process; that will unroll the JNI call stack and cause havoc. Use _exit(). */ + when daemon exits it does not live on as a zombie. N.B. On Android, do not return from + within this process; that will unroll the JNI call stack and cause havoc -- call _exit() + instead (not exit(), because we want to avoid any Java atexit(3) callbacks as well). If + _exit() is used on non-Android systems, then source code coverage does not get reported, + because it relies on an atexit() callback to write the accumulated counters into .gcda + files. */ +#ifdef ANDROID +# define EXIT_CHILD(n) _exit(n) +#else +# define EXIT_CHILD(n) exit(n) +#endif + // Ensure that all stdio streams are flushed before forking, so that if a child calls + // exit(), it will not result in any buffered output being written twice to the file + // descriptor. + fflush(stdout); + fflush(stderr); switch (fork()) { case -1: - exit(WHY_perror("fork")); + EXIT_CHILD(WHY_perror("fork")); case 0: { /* Grandchild process. Close logfile (so that it gets re-opened again on demand, with - our own file pointer), disable logging to stderr (about to get closed), disconnect - from current directory, disconnect standard I/O streams, and start a new process - session so that if we are being started by an adb shell session on an Android device, - then we don't receive a SIGHUP when the adb shell process ends. 
*/ + our own file pointer), disable logging to stderr (about to get redirected to + /dev/null), disconnect from current directory, disconnect standard I/O streams, and + start a new process session so that if we are being started by an adb shell session + on an Android device, then we don't receive a SIGHUP when the adb shell process ends. + */ close_log_file(); disable_log_stderr(); int fd; if ((fd = open("/dev/null", O_RDWR, 0)) == -1) - _exit(WHY_perror("open(\"/dev/null\")")); + EXIT_CHILD(WHY_perror("open(\"/dev/null\")")); if (setsid() == -1) - _exit(WHY_perror("setsid")); + EXIT_CHILD(WHY_perror("setsid")); if (chdir(dir) == -1) - _exit(WHYF_perror("chdir(%s)", alloca_str_toprint(dir))); + EXIT_CHILD(WHYF_perror("chdir(%s)", alloca_str_toprint(dir))); if (dup2(fd, 0) == -1) - _exit(WHYF_perror("dup2(%d,0)", fd)); + EXIT_CHILD(WHYF_perror("dup2(%d,0)", fd)); if (dup2(fd, 1) == -1) - _exit(WHYF_perror("dup2(%d,1)", fd)); + EXIT_CHILD(WHYF_perror("dup2(%d,1)", fd)); if (dup2(fd, 2) == -1) - _exit(WHYF_perror("dup2(%d,2)", fd)); + EXIT_CHILD(WHYF_perror("dup2(%d,2)", fd)); if (fd > 2) (void)close(fd); /* The execpath option is provided so that a JNI call to "start" can be made which @@ -916,14 +931,15 @@ int app_server_start(const struct cli_parsed *parsed, struct cli_context *contex sentinal. */ execl(execpath, execpath, "start", "foreground", (void *)NULL); WHYF_perror("execl(%s,\"start\",\"foreground\")", alloca_str_toprint(execpath)); - _exit(-1); + EXIT_CHILD(-1); } - _exit(server()); + EXIT_CHILD(server()); // NOT REACHED } } // TODO wait for server_write_pid() to signal more directly? - _exit(0); // Main process is waitpid()-ing for this. + EXIT_CHILD(0); // Main process is waitpid()-ing for this. +#undef EXIT_CHILD } } /* Main process. Wait for the child process to fork the grandchild and exit. */ diff --git a/doc/README.md b/doc/README.md index 557938a3..f391fa0f 100644 --- a/doc/README.md +++ b/doc/README.md @@ -5,6 +5,12 @@ Serval DNA Technical Documentation This directory contains [technical documents][] that accompany the [Serval DNA][] component of the [Serval mesh network][]. + * [Building Serval DNA](../INSTALL.md) has instructions to build a native + Serval DNA executable. + + * [Testing Serval DNA](./Testing.md) describes the Serval DNA test scripts and + the test framework. + * [Configuring Serval DNA](./Servald-Configuration.md) describes the persistent configuration system and its command-line API, the built-in system file paths, daemon instances and basic network configuration. diff --git a/doc/Testing.md b/doc/Testing.md new file mode 100644 index 00000000..39d737f7 --- /dev/null +++ b/doc/Testing.md @@ -0,0 +1,294 @@ +Serval DNA Testing +================== +[Serval Project][], June 2014 + +[Serval DNA][] is tested using a suite of [test scripts](../tests/) written in +the [Bash][] shell scripting language, using the Serval Project's own [Bash +Test Framework][]. These scripts are [integration tests][] focussed on the +Serval DNA component and its external interfaces. 
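For orientation, each test case in these scripts is a small group of Bash functions that share the test case's name. The following sketch shows the general shape only; it is not a test case from the suite, and while the `doc_`/`test_` naming convention and the `runTests` entry point belong to the framework, the helper and assertion names shown are merely typical of its style:

    #!/bin/bash
    # Illustrative sketch only -- the relative path to the framework and the
    # exact assertion signatures are assumptions, not a definitive API.
    source "${0%/*}/../testframework.sh"

    doc_EchoWorks="echo prints its arguments"
    test_EchoWorks() {
       executeOk echo 'hello'                  # run a command and assert it exits with status 0
       assertStdoutGrep --matches=1 'hello'    # assert on the captured standard output
    }

    runTests "$@"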
+ +Test Framework +-------------- + +The [Bash Test Framework][] performs common testing work, so that test +developers can focus on the specifics of their test cases and test cases +contain a minimum of [boilerplate code][]: + + * creates a temporary working directory to isolate each test case + * invokes each test case's set-up, test, finalise, and tear-down functions in + a defined order, guaranteeing to always call the latter two + * provides a rich set of assertion functions + * records the outcome of each test case: PASS, FAIL or ERROR + * records a detailed log of the execution of each test case + * removes temporary working directories and files after each test case + * kills any stray processes after each test case + * runs test cases in parallel if so directed + * reports progress during execution + +Some features that may be added in future are: + + * conformance with [Test Anything Protocol][TAP] + * support for a SKIP test outcome + * formal versioning of the Test Framework and parts of its API, to catch + incompatibilities between test scripts and Framework upgrades + +Prerequisites +------------- + +The [Bash Test Framework][] requires the following execution environment: + + * [Bash][] version 3.2.48 or later + * [GNU grep][] version 2.7 or later + * [GNU sed][] version 4.2 or later + * [GNU awk][] version 3.1 or later + * [pgrep][] and [pkill][] version 593 or later (Solaris) or from procps-ng 3.3 + or later (Linux) + +Before running any tests, all the executables and other artifacts under test +(ie, the **servald** executable), plus all test utilities, must be +[built](../INSTALL.md). + +Test scripts +------------ + +Executing a test script without any arguments causes it to run all the test +cases that it defines, one at a time. The script will terminate once all test +cases have been run, and its exit status will be zero only if all test cases +reported PASS.
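Because the exit status reflects the overall outcome, a test script is easy to drive from another script or from a continuous-integration job. For example (a sketch only; the script name is illustrative):

    if ./tests/config; then
        echo "config tests passed"
    else
        echo "config tests FAILED" >&2
        exit 1
    fi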
+ +Every test script uses the [Bash Test Framework][] to parse its command line, +so the following options are supported by all test scripts: + + * __`-l`__ or __`--list`__ causes the script to print a list of all its test + cases on standard output, instead of executing them + + * __`-t`__ or __`--trace`__ sets the Bash `-x` option during execution of each + test case, which adds much more detail to the test logs + + * __`-v`__ or __`--verbose`__ causes test logs to be sent to standard output + during execution of the tests, so the developer can watch a test as it runs + (this option is incompatible with running tests in parallel) + + * __`-E`__ or __`--stop-on-error`__ causes the test script to stop running new + test cases as soon as any test reports ERROR, and to wait for currently + running test cases to finish + + * __`-F`__ or __`--stop-on-failure`__ causes the test script to stop running + new test cases as soon as any test reports FAIL, and to wait for currently + running test cases to finish + + * __`-j N`__ or __`--jobs=N`__ causes up to __N__ test cases to be run + concurrently, which can greatly speed the rate of completion of a large test + run, since most tests spend much of their time either sleeping or I/O bound + + * __`-f PREFIX`__ or __`--filter=PREFIX`__ causes only those test cases whose + names begin with __PREFIX__ to be executed + + * __`-f N`__ or __`--filter=N`__ causes only test case number __N__ to be + executed (test cases are numbered in the order they are defined in the + script) + + * __`-f M-N`__ or __`--filter=M-N`__ causes only test case numbers __M__ + through to __N__ (inclusive) to be executed (test cases are numbered in the + order they are defined in the script); if __M__ is omitted then all cases up + to number __N__ are executed; if __N__ is omitted then all test cases from + number __M__ and above are executed + + * __`-f M,N,...`__ or __`--filter=M,N,...`__ causes only test cases __M__ and + __N__ (... etc.) to be executed (test cases are numbered in the order they + are defined in the script) + +There are other options as well. To see a complete and up-to-date summary, use +the __`--help`__ option: + + $ ./tests/all --help + +Aggregate scripts +----------------- + +Some test scripts simply aggregate other scripts, providing a convenient way to +execute many tests with a single command. Aggregate scripts behave in all +respects like a normal test script: the command line options and exit status +are the same. + +The most notable aggregate script is [tests/all](../tests/all), which runs all +available tests except long-running, resource-hungry “stress” tests: + + $ ./tests/all + 1 [PASS.] (logging) By default, only errors and warnings are logged to stderr + 2 [PASS.] (logging) Configure all messages logged to stderr + 3 [PASS.] (logging) Configure no messages logged to stderr + 4 [PASS.] (logging) By Default, all messages are appended to a configured file + ... + 158 [PASS.] (rhizomeprotocol) One way direct pull bundle from configured peer + 159 [PASS.] (rhizomeprotocol) Two-way direct sync bundles with configured peer + 160 [PASS.] (directory_service) Publish and retrieve a directory entry + 161 [PASS.] (directory_service) Ping via relay node + 161 tests, 161 pass, 0 fail, 0 error + $ + +Test logs +--------- + +All test scripts write their test logs into the `testlog` sub-directory +(relative to the current working directory), which has the following structure: + + ./testlog/ + SCRIPTNAME/ + 1.FirstTestCaseName.RESULT/ + log.txt + ...
other files... + 2.SecondTestCaseName.RESULT/ + log.txt + ... other files... + SECONDSCRIPTNAME/ + 1.first_test_case_name.RESULT/ + log.txt + ... other files... + 2.second_test_case_name.RESULT/ + log.txt + ... other files... + ... more script directories... + +where `SCRIPTNAME` and `SECONDSCRIPTNAME` are the names of the test scripts, +`FirstTestCaseName`, `first_test_case_name`, etc. are the names of the tests +within those scripts, and `RESULT` is either `ERROR`, `FAIL` or `PASS`. An +aggregate test script writes logfiles for all the test cases it includes under +its own SCRIPTNAME, not under the names of the scripts it includes. + +Whenever a test script starts, it deletes its `testlog/SCRIPTNAME` directory +and all its contents, so the logs from previous runs are lost. + +Every test case produces a `log.txt` file, and may also produce other files to +assist diagnosis in case of failure or to supplement a pass result, eg, +performance statistics, code coverage data, network packet logs for +reproducibility. + +Source code coverage +-------------------- + +The [Bash Test Framework][] has command-line options to support per-test-case +[source code test coverage][] analysis using [GCC][] and [gcov(1)][]. An +aggregate coverage analysis can easily be generated with no special options to +test scripts. + +To generate code coverage information for [Serval DNA][], modify the standard +[build](../INSTALL.md) procedure by adding a CFLAGS argument to the +`./configure` step: + + ... + $ ./configure CFLAGS='-g -O0 --coverage' + $ make + ... + +This will generate one [GCNO][] file for every object file, in the same +directory as the object file. + +Once **servald** has been built using these flags, invoking it will generate +some [GCDA][] coverage data files, one per source file, in the same directory +as the [GCNO][] files. Repeated invocations will accumulate coverage data in +the same files. The environment variables `GCOV_PREFIX` and +`GCOV_PREFIX_STRIP` can be used to change the directory where the [GCDA][] data +files are written. + +### Aggregate code coverage + +To generate aggregate code coverage for a test run: + + $ make covzero + $ ./tests/all + ... + $ make covhtml + $ www-browser ./coverage_html/index.html + ... + +The coverage report will reflect exactly the accumulated coverage of all tests +run between `make covzero` and `make covhtml`. The above example runs all +tests (except stress tests) but any combination may be run, including manual +invocations of **servald**. The **servald** executable must be invoked at +least once after `make covzero`, or `make covhtml` will fail with an error, for +lack of coverage data. + +If more tests are run without invoking `make covzero`, then the coverage data +will sum with the existing coverage data since the last `make covzero`. + +### Per-test-case code coverage + +**Note**: Per-test-case coverage support is of very limited use because of +deficiencies in the coverage data processing utilities (see below). + +If the __`--coverage`__ option is given to a test script, then it sets the +`GCOV_PREFIX` and `GCOV_PREFIX_STRIP` environment variables while running each +test case, causing each case's generated [GCDA][] coverage data files to be +created under the case's own log directory: + + ./testlog/ + SCRIPTNAME/ + N.TestCaseName.RESULT/ + log.txt + gcov/ + home/username/src/serval-dna/objs_servald/cli.gcda + home/username/src/serval-dna/objs_servald/commandline.gcda + ... 
+ home/username/src/serval-dna/objs_servald/nacl/src/crypto_auth_hmacsha256_ref/hmac.c + ... + +In theory, these per-test-case [GCDA][] data files could be merged to produce +coverage data for any desired combination of test cases, but there is currently +no command-line utility available to perform this merge. The code for merging +undoubtably exists in the *libgcov* [atexit(3)][] callback, which sums the +process's accumulated execution counts into any existing [GCDA][] files, but no +work has been done to extract this code into a utility. + +If the __`--geninfo`__ option is given (which implies `--coverage`), the test +framework will invoke [geninfo][] after each test case completes, to generate +one [lcov][] *tracefile* per case named `coverage.info` located in the case's +own log directory: + + ./testlog/ + SCRIPTNAME/ + N.TestCaseName.RESULT/ + log.txt + coverage.info + +**Note**: The `--geninfo` option must be accompanied by at least one +__`--gcno-dir=PATH`__ option, or the `TFW_GCNO_PATH` environment variable must +be set to a list of colon-separated directory paths. The test framework +recursively searches all these directories looking for [GCNO][] files, which it +then supplies to [geninfo][], which uses them to find the source files and +[GCDA][] files produced by `--coverage`. + +The per-test-case tracefiles produced by [geninfo][] may be merged together +using the `lcov --add-tracefile` option, and may also be combined into a single +coverage report by passing many tracefile arguments to the [genhtml][] utility. +Unfortunately, both of these operations are prohibitively slow, which makes the +`--geninfo` option of limited use for the time being. + +----- +**Copyright 2013 Serval Project Inc.** +![CC-BY-4.0](./cc-by-4.0.png) +This document is available under the [Creative Commons Attribution 4.0 International licence][CC BY 4.0]. + + +[Serval Project]: http://www.servalproject.org/ +[CC BY 4.0]: ./LICENSE-DOCUMENTATION.md +[Serval DNA]: ../README.md +[Bash]: http://en.wikipedia.org/wiki/Bash_(Unix_shell) +[Bash Test Framework]: http://developer.servalproject.org/dokuwiki/doku.php?id=content:tech:bash_test_framework +[GNU grep]: http://www.gnu.org/software/grep/ +[GNU sed]: http://www.gnu.org/software/sed/ +[GNU awk]: http://www.gnu.org/software/gawk/ +[pgrep]: http://en.wikipedia.org/wiki/Pgrep +[pkill]: http://en.wikipedia.org/wiki/Pkill +[integration tests]: http://en.wikipedia.org/wiki/Integration_testing +[boilerplate code]: http://en.wikipedia.org/wiki/Boilerplate_code +[TAP]: http://en.wikipedia.org/wiki/Test_Anything_Protocol +[source code test coverage]: http://en.wikipedia.org/wiki/Code_coverage +[GCC]: https://gcc.gnu.org/ +[gcov(1)]: https://gcc.gnu.org/onlinedocs/gcc/Gcov.html +[GCNO]: https://gcc.gnu.org/onlinedocs/gcc-3.4.2/gcc/Gcov-Data-Files.html +[GCDA]: https://gcc.gnu.org/onlinedocs/gcc-3.4.2/gcc/Gcov-Data-Files.html +[lcov]: http://ltp.sourceforge.net/archive/old_pages/coverage/lcov.php +[geninfo]: http://ltp.sourceforge.net/coverage/lcov/geninfo.1.php +[genhtml]: http://ltp.sourceforge.net/coverage/lcov/genhtml.1.php +[atexit(3)]: http://man7.org/linux/man-pages/man3/atexit.3.html diff --git a/log.c b/log.c index b840c80a..62b44cb1 100644 --- a/log.c +++ b/log.c @@ -609,7 +609,7 @@ static void _rotate_log_file(_log_iterator *it) static void _flush_log_file() { - if (_log_file && _log_file != NO_FILE) { + if (_log_file && _log_file != NO_FILE && strbuf_len(&_log_file_strbuf) != 0) { fprintf(_log_file, "%s%s%s", strbuf_len(&_log_file_strbuf) ? 
strbuf_str(&_log_file_strbuf) : "", strbuf_len(&_log_file_strbuf) ? "\n" : "", @@ -619,8 +619,15 @@ static void _flush_log_file() } } +/* Discard any unwritten log messages and close the log file immediately. This should be called in + * any child process immediately after fork() to prevent any buffered log messages from being + * written twice into the log file. + * + * @author Andrew Bettison + */ void close_log_file() { + strbuf_reset(&_log_file_strbuf); if (_log_file && _log_file != NO_FILE) fclose(_log_file); _log_file = NULL; diff --git a/testframework.sh b/testframework.sh index a6d9ccea..6d4e2287 100644 --- a/testframework.sh +++ b/testframework.sh @@ -91,6 +91,11 @@ Options: -f -N --filter=-N Only execute tests with numbers <= N -f N- --filter=N- Only execute tests with numbers >= N -f ... --filter=M,N,... Only execute tests with number M or N or ... + -c --coverage Collect test coverage data + -cg --geninfo Invoke geninfo(1) to produce one coverage.info file + per test case (requires at least one --gcno-dir) + -cd DIR --gcno-dir=DIR Use test coverage GCNO files under DIR (overrides + TFW_GCNO_PATH env var) " } @@ -169,7 +174,8 @@ runTests() { _tfw_njobs=1 _tfw_log_noise=true _tfw_assert_noise=true - _tfw_logdir_script="${TFW_LOGDIR:-$_tfw_cwd/testlog}/$_tfw_script_name" + _tfw_logdir="${TFW_LOGDIR:-$_tfw_cwd/testlog}" + _tfw_logdir_script="$_tfw_logdir/$_tfw_script_name" _tfw_list=false _tfw_trace=false _tfw_verbose=false @@ -178,6 +184,9 @@ runTests() { _tfw_default_execute_timeout=60 _tfw_default_wait_until_timeout=60 _tfw_timeout_override= + _tfw_coverage=false + _tfw_geninfo=false + _tfw_gcno_path=() local allargs="$*" local -a filters=() local oo @@ -219,6 +228,14 @@ runTests() { _tfw_is_float "${1#*=}" || _tfw_fatal "invalid option: $1" _tfw_timeout_override="${1#*=}" ;; + -c|--coverage) _tfw_coverage=true;; + -cg|--geninfo) _tfw_coverage=true; _tfw_geninfo=true;; + -cd) [ -n "$2" ] || _tfw_fatal "missing argument after option: $1" + _tfw_gcno_path+=("$2") + shift + ;; + -cd*) _tfw_gcno_path+=("${1#-?}");; + --gcno-dir=*) _tfw_gcno_path+=("${1#*=}");; --) shift; break;; -*) _tfw_fatal "unsupported option: $1";; *) _tfw_fatal "spurious argument: $1";; @@ -229,6 +246,44 @@ runTests() { if $_tfw_verbose && [ $_tfw_njobs -ne 1 ]; then _tfw_fatal "--verbose is incompatible with --jobs=$_tfw_njobs" fi + # Handle --gcno-dir arguments, or if none given, $TFW_GCNO_PATH env var. + # Convert into a list of absolute directory paths. + if [ ${#_tfw_gcno_path[*]} -eq -0 ]; then + local oIFS="$IFS" + IFS=: + _tfw_gcno_path=($TFW_GCNO_PATH) + IFS="$oIFS" + else + local pathdir + for pathdir in "${_tfw_gcno_path[@]}"; do + [ -d "$pathdir" ] || _tfw_fatal "--gcno-dir: no such directory: '$pathdir'" + done + fi + _tfw_gcno_dirs=() + local pathdir + for pathdir in "${_tfw_gcno_path[@]}"; do + [ -d "$pathdir" ] && _tfw_gcno_dirs+=("$(abspath "$pathdir")") + done + # Handle --geninfo option. + if $_tfw_geninfo; then + if [ ${#_tfw_gcno_dirs[*]} -eq 0 ]; then + _tfw_fatal "--geninfo: requires at least one --gcno-dir=DIR or \$TFW_GCNO_PATH env var" + fi + _tfw_checkCommandInPATH geninfo + _tfw_checkCommandInPATH gcov _tfw_gcov_path + # Check that all source files are available. + _tfw_extract_source_files_from_gcno "${_tfw_gcno_dirs[@]}" + _tfw_coverage_source_basedir=. 
+ if [ -n "$TFW_COVERAGE_SOURCE_BASE_DIR" ]; then + [ -d "$TFW_COVERAGE_SOURCE_BASE_DIR" ] || _tfw_fatal "--geninfo: no such directory '$TFW_COVERAGE_SOURCE_BASE_DIR' (\$TFW_COVERAGE_SOURCE_BASE_DIR)" + _tfw_coverage_source_basedir="$TFW_COVERAGE_SOURCE_BASE_DIR" + fi + local src + for src in "${_tfw_coverage_source_files[@]}"; do + local path="$_tfw_coverage_source_basedir/$src" + [ -r "$path" ] || _tfw_fatal "--geninfo: missing source file $path" + done + fi # Enumerate all the test cases. _tfw_list_tests # If we are only asked to list them, then do so and finish. @@ -306,6 +361,29 @@ runTests() { _tfw_tmp=$_tfw_tmpdir/_tfw-$_tfw_unique trap '_tfw_status=$?; rm -rf "$_tfw_tmp"; exit $_tfw_status' EXIT SIGHUP SIGINT SIGTERM mkdir $_tfw_tmp || _tfw_fatalexit + # Set up test coverage data directory, which contains all the .gcno + # files of the executable(s) under test. If using geninfo(1) to + # generate coverage info files, then link to all the source files, to + # ensure that temporary .gcov files are created in this directory and + # not in the repository's base directory (which would cause race + # conditions). + if $_tfw_coverage; then + export GCOV_PREFIX="$_tfw_logdir_test/gcov" + export GCOV_PREFIX_STRIP=0 + mkdir "$GCOV_PREFIX" || _tfw_fatalexit + # Link to GCNO files. + if [ ${#_tfw_gcno_dirs[*]} -ne 0 ]; then + find "${_tfw_gcno_dirs[@]}" -type f -name '*.gcno' -print0 | cpio -0pdl --quiet "$GCOV_PREFIX" + fi + # Link source files to where geninfo(1) will always find them before + # finding the original source files. + if $_tfw_geninfo; then + pushd "$_tfw_coverage_source_basedir" >/dev/null || _tfw_fatalexit + find "${_tfw_coverage_source_files[@]}" -maxdepth 0 -print0 | cpio -0pdl --quiet "$GCOV_PREFIX" + popd >/dev/null + fi + fi + ## XXX _tfw_geninfo_initial "$scriptName/$testName" >$_tfw_tmp/log.geninfo 2>&1 local start_time=$(_tfw_timestamp) local finish_time=unknown ( #)#<-- fixes Vim syntax highlighting @@ -388,6 +466,17 @@ runTests() { fi } >"$_tfw_logdir_test/log.txt" mv "$_tfw_logdir_test" "$_tfw_logdir_test.$result" + _tfw_logdir_test="$_tfw_logdir_test.$result" + if $_tfw_geninfo; then + local testname=$(_tfw_string_to_identifier "$scriptName/$testName") + local result="$_tfw_tmp/result.info" + local coverage="$_tfw_logdir_test/coverage.info" + { + echo '++++++++++ log.geninfo ++++++++++' + _tfw_run_geninfo "$coverage" --test-name "$testname" 2>&1 + echo '++++++++++' + } >>"$_tfw_logdir_test/log.txt" + fi exit 0 ) /dev/null | $SED -n -e '1s/^\[\([0-9]\{1,\}\)\].*/\1/p') @@ -564,6 +653,36 @@ _tfw_echo_result() { esac } +_tfw_extract_source_files_from_gcno() { + # This should possibly be done by creating a binary utility that knows how to + # disassemble GCNO files. In the meantime, this approach seems to work: + # simply extract all strings from all GCNO files that match *.c or *.h. + local IFS=' +' + _tfw_coverage_source_files=($(find "$@" -type f -name '*.gcno' -print0 | xargs -0 strings | grep '\.[ch]$' | sort -u)) +} + +_tfw_run_geninfo() { + local infofile="$1" + shift + geninfo \ + --rc lcov_tmp_dir="$_tfw_tmp" \ + --gcov-tool "$_tfw_gcov_path" \ + --output-file "$infofile" \ + --no-external \ + "$@" \ + "$_tfw_logdir_test/gcov" + # Cook the absolute source file paths in the info file to refer to the + # original source files, not the links we placed into the gcov subdirectory + # in order to avoid race conditions. 
+ local basedir="$(abspath "$_tfw_coverage_source_basedir")" + $SED -i -e "/^SF:/s:$_tfw_logdir_test/gcov:$basedir:" "$infofile" +} + +_tfw_string_to_identifier() { + echo "$1" | $SED -e 's/\//__/g' -e 's/[^0-9a-zA-Z_]/_/g' +} + # Internal (private) functions that are not to be invoked directly from test # scripts. @@ -716,7 +835,7 @@ _tfw_execute() { fi if [ -n "$timeout" ]; then if type pgrep >/dev/null 2>/dev/null; then - ( #)#(fix Vim syntax colouring + ( #)#( <<- fixes Vim syntax colouring # For some reason, set -e does not work here. So all the following # commands are postfixed with || exit $? local executable_pid=$(pgrep -P $subshell_pid) || exit $? @@ -1152,10 +1271,27 @@ _tfw_checkTerminfo() { } _tfw_checkCommandInPATH() { - case $(type -p "$1") in + local __var="$2" + local __path="$(type -p "$1")" + case "$__path" in */"${1##*/}") ;; *) _tfw_fatal "command not found: $1 (PATH=$PATH)" esac + [ -n "$__var" ] && eval $__var='"$__path"' + return 0 +} + +_tfw_count_path_components() { + local path="$1" + local i=0 + while [ -n "$path" ]; do + case "$path" in + */) path="${path%/}";; + */*) i=$(($i + 1)); path="${path%/*}";; + *) i=$(($i + 1)); path=;; + esac + done + echo $i } _tfw_unpack_words() {
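Taken together, the Makefile targets and test-framework options added above support per-test-case coverage workflows along the following lines (a sketch only; the test script, test-case names and output paths are illustrative, and as doc/Testing.md notes, the tracefile merging step can be prohibitively slow):

    # Build with coverage instrumentation, as described in doc/Testing.md
    ./configure CFLAGS='-g -O0 --coverage'
    make

    # Run one test script, producing a coverage.info tracefile per test case
    ./tests/config --geninfo --gcno-dir=objs_servald

    # Merge selected per-test-case tracefiles and render a combined report
    lcov --add-tracefile testlog/config/1.SomeTestName.PASS/coverage.info \
         --add-tracefile testlog/config/2.OtherTestName.PASS/coverage.info \
         --output-file selected.info
    genhtml selected.info -o selected_coverage_html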