diff --git a/README.md b/README.md
index 0b1e8f315..9bb58ccc6 100644
--- a/README.md
+++ b/README.md
@@ -234,6 +234,100 @@ if supported, set number of concurrent builds to `PARALLELISM`
 
 ## Testing
 
+`jenkins/helper/test_launch_controller.py` is used to launch and control multiple test executions in parallel.
+
+### Its dependencies beyond stock Python 3 are:
+ - `psutil` to control subprocesses
+ - `py7zr` (optional) to build 7z reports instead of tar.bz2 archives
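+
+On a stock system, both can be installed via pip, mirroring what the build containers do:
+```
+pip3 install psutil py7zr
+```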
+
+### It reads these environment variables:
+- `INNERWORKDIR` - the directory to place the report files in
+- `WORKDIR` - used instead if `INNERWORKDIR` hasn't been set.
+- `TEMP` - the temporary directory (as `TEMP`/ArangoDB) if `INNERWORKDIR` isn't set
+- `TMPDIR` and `TEMP` are set to the resulting temporary directory and passed on to the executors.
+- `TSHARK` - passed to testing.js as the value of `--sniffProgram`
+- `DUMPDEVICE` - passed to testing.js as the value of `--sniffDevice`
+- `SKIPNONDETERMINISTIC` - passed to testing.js as the value of `--skipNondeterministic`
+- `SKIPTIMECRITICAL` - passed to testing.js as the value of `--skipTimeCritical`
+- `BUILDMODE` - passed to testing.js as the value of `--buildType`
+- `DUMPAGENCYONERROR` - passed to testing.js as the value of `--dumpAgencyOnError`
+- `PORTBASE` - passed to testing.js as the value of `--minPort` and `--maxPort` (+99); defaults to 7000
+- `SKIPGREY` - passed to testing.js as the value of `--skipGrey`
+- `ONLYGREY` - passed to testing.js as the value of `--onlyGrey`
+- `TIMELIMIT` - number of seconds used to calculate the execution *deadline* from the start time.
+- `COREDIR` - the directory in which to locate coredumps of crashes
+- `LDAPHOST` - enables the tests with `ldap` flags.
+- any parameter in `test-definition.txt` that starts with a `$` is expanded to its value.
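+
+A typical invocation could look like this (paths and values are illustrative):
+```
+export TIMELIMIT=3600 INNERWORKDIR=/work PORTBASE=7000
+python3 jenkins/helper/test_launch_controller.py ../ArangoDB/tests/test-definition.txt
+```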
+
+### Its parameters are:
+ - `PATH/test-definition.txt` - (first parameter) the test definitions file from the arangodb source tree
+   (also used to locate the arangodb source)
+ - `-f` `[launch|dump]` - use `dump` to syntax-check `test-definition.txt` instead of executing the tests
+ - `--validate-only` - only validate `test-definition.txt`, don't run the tests
+ - `--help-flags` - list the flags which can be used in `test-definition.txt`:
+    - `cluster`: this test requires a cluster
+    - `single`: this test requires a single server
+    - `full`: this test is only executed in full tests
+    - `!full`: this test is only executed in non-full tests
+    - `gtest`: only the gtests are to be executed
+    - `ldap`: this test requires an LDAP server (see `LDAPHOST`)
+    - `enterprise`: this test is only executed with the enterprise version
+    - `!windows`: this test is excluded from ps1 output
+ - `--cluster` - filter `test-definition.txt` for all tests flagged as `cluster`
+ - `--full` - all tests, including those flagged as `full`, are executed
+ - `--gtest` - only gtests are executed
+ - `--all` - output all tests, unfiltered
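+
+For example, to syntax-check the definitions, or to run the cluster tests including those flagged `full` (path illustrative):
+```
+python3 jenkins/helper/test_launch_controller.py ../ArangoDB/tests/test-definition.txt -f dump
+python3 jenkins/helper/test_launch_controller.py ../ArangoDB/tests/test-definition.txt --cluster --full
+```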
+ 
+### Syntax in `test-definition.txt`
+Lines consist of these parts:
+```
+testingJsSuiteName flags params suffix -- args to testing.js
+```
+where
+- `flags` are listed above under `--help-flags`
+- params are:
+  - `weight` - execution-order priority of the test; 250 is the default.
+  - `wweight` - execution slots to book; defaults to 1, or 4 for cluster tests.
+  - `buckets` - split the testcases into this many concurrently launched chunks.
+  Specifying a `*` in front of a number takes the default and multiplies it by that value.
+- `suffix` - if a testsuite is launched several times, makes the runs distinguishable,
+  e.g. shell_aql => shell_aql_vst; bucket indices are appended afterwards.
+- `--` - literally the two dashes at which the line is split.
+- `args to testing.js` - anything that `./scripts/unittest --help` would print.
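+
+An illustrative line combining these parts (the argument after `--` is just an example):
+```
+shell_aql cluster buckets=3 suffix=vst weight=*2 -- --dumpAgencyOnError true
+```
+This would run `shell_aql` as a cluster test in three concurrent chunks, distinguishable by the `vst` suffix plus the appended bucket index, at twice the default weight.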
+
+### Job scheduling
+To utilize all of the machine's resources, tests can be run in parallel. The number of
+`execution_slots` is set to the number of physical cores of the machine (not hyperthreads).
+`wweight` tracks the load expected from the currently running tests, which is kept at no more than `execution_slots`; a sketch follows below.
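+
+A minimal sketch of this slot booking, with hypothetical names and plain threads standing in for the real testing.js workers:
+```python
+import time
+from threading import Thread
+import psutil
+
+EXECUTION_SLOTS = psutil.cpu_count(logical=False) or 1  # physical cores, not threads
+
+def run_test(test):
+    """stand-in worker; the real controller spawns and monitors testing.js"""
+    time.sleep(test["duration"])
+
+def schedule(tests):
+    """book `wweight` slots per test, launching at most one test every 5s"""
+    pending = sorted(tests, key=lambda t: t["weight"], reverse=True)
+    running, used_slots = [], 0
+    while pending or running:
+        fits = pending and used_slots + pending[0]["wweight"] <= EXECUTION_SLOTS
+        if fits or (pending and not running):
+            test = pending.pop(0)
+            used_slots += test["wweight"]
+            worker = Thread(target=run_test, args=(test,))
+            worker.start()
+            running.append((worker, test))
+            time.sleep(5)  # don't overwhelm the machine while launching arangods
+            continue
+        for worker, test in list(running):  # free the slots of finished tests
+            if not worker.is_alive():
+                worker.join()
+                running.remove((worker, test))
+                used_slots -= test["wweight"]
+        time.sleep(1)
+
+schedule([{"weight": 500, "wweight": 4, "duration": 2},
+          {"weight": 250, "wweight": 1, "duration": 1}])
+```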
+
+For managing each of these parallel executions of testing.js, worker threads are used. The workers
+themselves will spawn a set of I/O threads to capture the output of testing.js into a report file.
+
+The life cycle of a testrun will be as follows:
+
+ - the environment variable `TIMELIMIT` defines a *deadline* for all tests: how many seconds are allowed overall.
+ - tests run in worker threads.
+ - the main thread keeps control and launches more worker threads once machine bandwidth permits, but at most one every 5s, so as not to overwhelm the machine while launching arangods.
+ - the tests themselves have their own timeouts; `testing.js` will abort if they are reached.
+ - workers have a progressive timeout: if a worker doesn't hear back from `testing.js` for 999999999s, it will hard kill it and abort. [currently set high / not used!]
+ - if workers see no output from `testing.js`, they check whether the *deadline* has been reached.
+ - if the *deadline* is reached, `SIGINT` [*nix] / `CTRL_BREAK` [Windows] is sent to `testing.js` to trigger its *deadline* feature.
+ - a reached *deadline* is recorded in the `testfailures.txt` report file and in the logfile of the test in question.
+ - with the *deadline* engaged, `testing.js` can neither send subsequent requests nor spawn processes => eventually testing will abort.
+ - a forced shutdown of arangod instances will reset the deadline, `SIGABRT` the arangods, and try to do core dump analysis.
+ - workers continue reading the pipes from `testing.js`, but once no more characters are coming, `waitpid()` checks with a 1s timeout whether `testing.js` is done and has exited.
+ - if a worker reaches `180` `waitpid()` invocations, it gives up: it hard kills `testing.js` and all other child processes it can find.
+ - this should unblock the worker's I/O threads, and they should exit.
+ - the `waitpid()` on `testing.js` should then return, the I/O threads should be joined, and the results passed up to the main thread.
+ - the workers thus have a sluggish interpretation of the *deadline*, giving them the chance to collect as much knowledge about the test execution as possible.
+ - meanwhile the main thread has a *fixed* deadline: 5 minutes after the `TIMELIMIT` is reached.
+ - if not all workers have indicated their exit before this final deadline:
+   - the main thread will start killing any subprocesses of itself which it finds.
+   - it then waits another 20s to see whether the workers have been unblocked by the killing.
+ - if not, it shouts "Geronimoooo", takes the big shotgun, and force-terminates the python process which is running it. This kills all remaining threads and terminates the process.
+ - if all workers have indicated their exit in time, their threads will be joined.
+ - reports will be generated.
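+
+As a sketch, the deadlines relate roughly like this (names illustrative):
+```python
+import os
+from datetime import datetime, timedelta
+
+start = datetime.now()
+timelimit = int(os.environ.get("TIMELIMIT", "1800"))  # seconds allowed overall
+deadline = start + timedelta(seconds=timelimit)       # workers: signal testing.js
+final_deadline = deadline + timedelta(minutes=5)      # main thread: kill, then force-exit
+```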
+
 ## Packaging
 
     makeRelease
diff --git a/containers/buildUbuntu3.docker/Dockerfile b/containers/buildUbuntu3.docker/Dockerfile
index d51604e1c..88203c94d 100644
--- a/containers/buildUbuntu3.docker/Dockerfile
+++ b/containers/buildUbuntu3.docker/Dockerfile
@@ -7,13 +7,13 @@ ENV COMPILER_VERSION 9
 
 ENV CLANG_VERSION 12
 
-RUN apt-get update && \
+RUN apt-get update --fix-missing && \
     apt-get install -y software-properties-common && \
     add-apt-repository -y ppa:ubuntu-toolchain-r/test && \
     apt-get update && \
-    apt-get upgrade -y
-
-RUN apt-get install -y build-essential gcc-${COMPILER_VERSION} g++-${COMPILER_VERSION} cmake make bison flex python python3-distutils ccache git libjemalloc-dev vim exuberant-ctags gdb fish ruby ruby-httparty ruby-rspec psmisc sudo debhelper debconf jq wget libdb-dev curl gnupg2 gcovr prometheus && gem install persistent_httparty && apt-get clean
+    apt-get upgrade -y && \
+    apt-get install -y build-essential gcc-${COMPILER_VERSION} g++-${COMPILER_VERSION} cmake make bison flex python3-psutil python3-distutils ccache git libjemalloc-dev vim exuberant-ctags gdb fish psmisc sudo debhelper debconf jq wget libdb-dev curl gnupg2 gcovr prometheus &&\
+    apt-get clean
 
 RUN curl -L https://github.com/mozilla/sccache/releases/download/v0.2.15/sccache-v0.2.15-${ARCH}-unknown-linux-musl.tar.gz | tar xvz -C /tmp && mv /tmp/sccache-v0.2.15-${ARCH}-unknown-linux-musl/sccache /usr/bin/sccache && chmod +x /usr/bin/sccache && rm -rf /tmp/sccache-v0.2.15-${ARCH}-unknown-linux-musl
 
diff --git a/containers/buildUbuntu4.docker/Dockerfile b/containers/buildUbuntu4.docker/Dockerfile
index 82a7b868d..bd5242474 100644
--- a/containers/buildUbuntu4.docker/Dockerfile
+++ b/containers/buildUbuntu4.docker/Dockerfile
@@ -1,3 +1,4 @@
+
 FROM ubuntu:20.04
 MAINTAINER Max Neunhoeffer <max@arangodb.com>
 
@@ -7,12 +8,12 @@ ENV COMPILER_VERSION 9
 
 ENV CLANG_VERSION 12
 
-RUN apt-get update && \
+RUN apt-get update --fix-missing && \
     apt-get install -y software-properties-common && \
     apt-get update && \
     apt-get upgrade -y
 
-RUN apt-get install -y build-essential gcc-${COMPILER_VERSION} g++-${COMPILER_VERSION} cmake make bison flex python ccache git libjemalloc-dev vim exuberant-ctags gdb fish ruby ruby-httparty ruby-rspec psmisc sudo debhelper debconf jq wget libdb-dev curl gnupg2 gcovr prometheus && gem install persistent_httparty && apt-get clean
+RUN apt-get install -y build-essential gcc-${COMPILER_VERSION} g++-${COMPILER_VERSION} cmake make bison flex python ccache git libjemalloc-dev vim exuberant-ctags gdb fish psmisc sudo debhelper debconf jq wget libdb-dev curl gnupg2 gcovr prometheus && apt-get clean
 
 RUN curl -L https://github.com/mozilla/sccache/releases/download/v0.2.15/sccache-v0.2.15-${ARCH}-unknown-linux-musl.tar.gz | tar xvz -C /tmp && mv /tmp/sccache-v0.2.15-${ARCH}-unknown-linux-musl/sccache /usr/bin/sccache && chmod +x /usr/bin/sccache && rm -rf /tmp/sccache-v0.2.15-${ARCH}-unknown-linux-musl
 
@@ -32,4 +33,6 @@ RUN git config --global --add safe.directory /work/ArangoDB
 RUN git config --global --add safe.directory /work/ArangoDB/enterprise
 RUN git config --global --add safe.directory /work/ArangoDB/docs
 
+RUN apt-get install -y python3-pip && pip3 install py7zr psutil 
+
 CMD [ "/usr/bin/fish" ]
diff --git a/containers/buildUbuntu5.docker/Dockerfile b/containers/buildUbuntu5.docker/Dockerfile
index 5524bbc55..6e244fc6b 100644
--- a/containers/buildUbuntu5.docker/Dockerfile
+++ b/containers/buildUbuntu5.docker/Dockerfile
@@ -7,12 +7,12 @@ ENV COMPILER_VERSION 10
 
 ENV CLANG_VERSION 12
 
-RUN apt-get update && \
+RUN apt-get update --fix-missing && \
     apt-get install -y software-properties-common && \
     apt-get update && \
     apt-get upgrade -y
 
-RUN apt-get install -y build-essential gcc-${COMPILER_VERSION} g++-${COMPILER_VERSION} cmake make bison flex python ccache git libjemalloc-dev vim exuberant-ctags gdb fish ruby ruby-httparty ruby-rspec psmisc sudo debhelper debconf jq wget libdb-dev curl gnupg2 gcovr prometheus && gem install persistent_httparty && apt-get clean
+RUN apt-get install -y build-essential gcc-${COMPILER_VERSION} g++-${COMPILER_VERSION} cmake make bison flex python ccache git libjemalloc-dev vim exuberant-ctags gdb fish psmisc sudo debhelper debconf jq wget libdb-dev curl gnupg2 gcovr prometheus && apt-get clean
 
 RUN curl -L https://github.com/mozilla/sccache/releases/download/v0.2.15/sccache-v0.2.15-${ARCH}-unknown-linux-musl.tar.gz | tar xvz -C /tmp && mv /tmp/sccache-v0.2.15-${ARCH}-unknown-linux-musl/sccache /usr/bin/sccache && chmod +x /usr/bin/sccache && rm -rf /tmp/sccache-v0.2.15-${ARCH}-unknown-linux-musl
 
@@ -37,4 +37,6 @@ RUN git config --global --add safe.directory /work/ArangoDB/3rdParty/iresearch
 RUN git config --global --add safe.directory /work/ArangoDB/3rdParty/rocksdb
 RUN git config --global --add safe.directory /work/ArangoDB/3rdParty/velocypack
 
+RUN apt-get install -y python3-pip && pip3 install py7zr psutil 
+
 CMD [ "/usr/bin/fish" ]
diff --git a/containers/buildUbuntu6.docker/Dockerfile b/containers/buildUbuntu6.docker/Dockerfile
index 990200062..88f50356f 100644
--- a/containers/buildUbuntu6.docker/Dockerfile
+++ b/containers/buildUbuntu6.docker/Dockerfile
@@ -7,7 +7,7 @@ ENV COMPILER_VERSION 11
 
 ENV CLANG_VERSION 14
 
-RUN apt-get update && \
+RUN apt-get update --fix-missing && \
     apt-get install -y software-properties-common && \
     apt-get update && \
     apt-get upgrade -y
@@ -39,4 +39,6 @@ RUN git config --global --add safe.directory /work/ArangoDB/3rdParty/iresearch
 RUN git config --global --add safe.directory /work/ArangoDB/3rdParty/rocksdb
 RUN git config --global --add safe.directory /work/ArangoDB/3rdParty/velocypack
 
+RUN apt-get install -y python3-pip && pip3 install py7zr psutil
+
 CMD [ "/usr/bin/fish" ]
diff --git a/helper.fish b/helper.fish
index e06d39d5c..9efb2313f 100644
--- a/helper.fish
+++ b/helper.fish
@@ -95,7 +95,8 @@ end
 function single ; set -gx TESTSUITE single ; end
 function cluster ; set -gx TESTSUITE cluster ; end
 function resilience ; set -gx TESTSUITE resilience ; end
-function catchtest ; set -gx TESTSUITE catchtest ; end
+function catchtest ; set -gx TESTSUITE gtest ; end
+function gtest ; set -gx TESTSUITE gtest ; end
 if test -z "$TESTSUITE" ; cluster
 else ; set -gx TESTSUITE $TESTSUITE ; end
 
@@ -1484,7 +1485,7 @@ function showConfig
   printf $fmt3 'SkipGrey'       $SKIPGREY      '(skipGrey/includeGrey)'
   printf $fmt3 'OnlyGrey'       $ONLYGREY      '(onlyGreyOn/onlyGreyOff)'
   printf $fmt3 'Storage engine' $STORAGEENGINE '(mmfiles/rocksdb)'
-  printf $fmt3 'Test suite'     $TESTSUITE     '(single/cluster/resilience/catchtest)'
+  printf $fmt3 'Test suite'     $TESTSUITE     '(single/cluster/resilience/gtest)'
   printf $fmt2 'Log Levels'     (echo $LOG_LEVELS)
   echo
   echo 'Package Configuration'
@@ -1943,7 +1944,7 @@ function moveResultsToWorkspace
       mv $WORKDIR/work/coverage $WORKSPACE
     end
 
-    set -l matches $WORKDIR/work/*.{asc,deb,dmg,rpm,tar.gz,tar.bz2,zip,html,csv}
+    set -l matches $WORKDIR/work/*.{asc,testfailures.txt,deb,dmg,rpm,7z,tar.gz,tar.bz2,zip,html,csv}
     for f in $matches
       echo $f | grep -qv testreport ; and echo "mv $f" ; and mv $f $WORKSPACE; or echo "skipping $f"
     end
diff --git a/helper.linux.fish b/helper.linux.fish
index 3ab9188fa..09f8e19f7 100644
--- a/helper.linux.fish
+++ b/helper.linux.fish
@@ -15,19 +15,19 @@ set -gx ARCH (uname -m)
 set IMAGE_ARGS "--build-arg ARCH=$ARCH"
 
 set -gx UBUNTUBUILDIMAGE3_NAME arangodb/ubuntubuildarangodb3-$ARCH
-set -gx UBUNTUBUILDIMAGE3_TAG 16
+set -gx UBUNTUBUILDIMAGE3_TAG 17
 set -gx UBUNTUBUILDIMAGE3 $UBUNTUBUILDIMAGE3_NAME:$UBUNTUBUILDIMAGE3_TAG
 
 set -gx UBUNTUBUILDIMAGE4_NAME arangodb/ubuntubuildarangodb4-$ARCH
-set -gx UBUNTUBUILDIMAGE4_TAG 17
+set -gx UBUNTUBUILDIMAGE4_TAG 18
 set -gx UBUNTUBUILDIMAGE4 $UBUNTUBUILDIMAGE4_NAME:$UBUNTUBUILDIMAGE4_TAG
 
 set -gx UBUNTUBUILDIMAGE5_NAME arangodb/ubuntubuildarangodb5-$ARCH
-set -gx UBUNTUBUILDIMAGE5_TAG 10
+set -gx UBUNTUBUILDIMAGE5_TAG 11
 set -gx UBUNTUBUILDIMAGE5 $UBUNTUBUILDIMAGE5_NAME:$UBUNTUBUILDIMAGE5_TAG
 
 set -gx UBUNTUBUILDIMAGE6_NAME arangodb/ubuntubuildarangodb6-$ARCH
-set -gx UBUNTUBUILDIMAGE6_TAG 2
+set -gx UBUNTUBUILDIMAGE6_TAG 3
 set -gx UBUNTUBUILDIMAGE6 $UBUNTUBUILDIMAGE6_NAME:$UBUNTUBUILDIMAGE6_TAG
 
 set -gx UBUNTUPACKAGINGIMAGE arangodb/ubuntupackagearangodb-$ARCH:1
@@ -454,11 +454,11 @@ function oskar
   and if test "$SAN" = "On"
     parallelism 2
     clearSanStatus
-    runInContainer --cap-add SYS_NICE --cap-add SYS_PTRACE (findBuildImage) $SCRIPTSDIR/runTests.fish $argv
+    runInContainer --security-opt seccomp=unconfined --cap-add SYS_NICE --cap-add SYS_PTRACE (findBuildImage) $SCRIPTSDIR/runTests.fish $argv
     set s $status
     set s (math $s + (getSanStatus))
   else
-    runInContainer --cap-add SYS_NICE (findBuildImage) $SCRIPTSDIR/runTests.fish $argv
+    runInContainer --security-opt seccomp=unconfined --cap-add SYS_NICE (findBuildImage) $SCRIPTSDIR/runTests.fish $argv
     set s $status
   end
 
diff --git a/helper.psm1 b/helper.psm1
index cf7565bb0..512ca0082 100644
--- a/helper.psm1
+++ b/helper.psm1
@@ -42,6 +42,7 @@ Else
         $global:TSHARK = ""
   }
 }
+$ENV:TSHARK=$global:TSHARK
 
 $global:HANDLE_EXE = $null
 If (Get-Command handle.exe -ErrorAction SilentlyContinue)
@@ -65,7 +66,7 @@ Else
 {
   Remove-Item "$global:COREDIR\*" -Recurse -Force
 }
-$global:RUBY = (Get-Command ruby.exe).Path
+$env:COREDIR=$global:COREDIR
 $global:INNERWORKDIR = "$WORKDIR\work"
 $global:ARANGODIR = "$INNERWORKDIR\ArangoDB"
 $global:ENTERPRISEDIR = "$global:ARANGODIR\enterprise"
@@ -582,6 +583,11 @@ Function showConfig
     Write-Host "------------------------------------------------------------------------------"
     Write-Host "Cache Statistics"
     showCacheStats
+    $ENV:SKIPNONDETERMINISTIC = $SKIPNONDETERMINISTIC
+    $ENV:SKIPTIMECRITICAL = $SKIPTIMECRITICAL
+    $ENV:SKIPGREY = $SKIPGREY
+    $ENV:ONLYGREY = $ONLYGREY
+    $ENV:BUILDMODE = $BUILDMODE
     comm
 }
 
@@ -600,7 +606,7 @@ Function resilience
 Function catchtest
 {
     $global:TESTSUITE = "catchtest"
-    $global:TESTSUITE_TIMEOUT = 1800
+    $global:TIMELIMIT = 1800
 }
 If (-Not($TESTSUITE))
 {
diff --git a/jenkins/helper/allure_commons/_allure.py b/jenkins/helper/allure_commons/_allure.py
new file mode 100644
index 000000000..628cea2be
--- /dev/null
+++ b/jenkins/helper/allure_commons/_allure.py
@@ -0,0 +1,2 @@
+def attach(*args, **kwargs):
+    """no-op stand-in for allure's attach; must accept positional arguments"""
+    return
diff --git a/jenkins/helper/asciiprint.py b/jenkins/helper/asciiprint.py
new file mode 100644
index 000000000..85f37267a
--- /dev/null
+++ b/jenkins/helper/asciiprint.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env python
+""" removes terminal control sequences and other non ascii characters """
+import unicodedata
+import re
+import sys
+
+is_tty = sys.stdout.isatty()
+PROGRESS_COUNT = 0
+
+# 7-bit C1 ANSI sequences, throbber glyph bytes and carriage returns,
+# each stripped individually (bytes version)
+ANSI_ESCAPE_B = re.compile(
+    rb"""
+    (?:
+        [\xE2\xA0\xA7\x8F]  # throbber glyph bytes
+    |
+        \r    # carriage return
+    |
+        \x1B  # ESC, followed by
+        (?:   # 7-bit C1 Fe (except CSI)
+            [@-Z\\-_]
+        |     # or [ for CSI, followed by a control sequence
+            \[
+            [0-?]*  # Parameter bytes
+            [ -/]*  # Intermediate bytes
+            [@-~]   # Final byte
+        )
+    )
+""",
+    re.VERBOSE,
+)
+
+
+def ascii_convert(the_bytes: bytes):
+    """convert string to only be ascii without control sequences"""
+    return ANSI_ESCAPE_B.sub(rb"", the_bytes).decode("utf-8")
+
+
+# 7-bit C1 ANSI sequences, throbber glyph bytes and carriage returns,
+# each stripped individually (str version)
+ANSI_ESCAPE = re.compile(
+    r"""
+    (?:
+        [\xE2\xA0\xA7\x8F]  # throbber glyph bytes
+    |
+        \r    # carriage return
+    |
+        \x1B  # ESC, followed by
+        (?:   # 7-bit C1 Fe (except CSI)
+            [@-Z\\-_]
+        |     # or [ for CSI, followed by a control sequence
+            \[
+            [0-?]*  # Parameter bytes
+            [ -/]*  # Intermediate bytes
+            [@-~]   # Final byte
+        )
+    )
+""",
+    re.VERBOSE,
+)
+
+
+def ascii_convert_str(the_str: str):
+    """convert string to only be ascii without control sequences"""
+    return ANSI_ESCAPE.sub("", the_str)
+
+
+def ascii_print(string):
+    """convert string to only be ascii without control sequences"""
+    string = ANSI_ESCAPE.sub("", string)
+    print("".join(ch for ch in string if ch == "\n" or unicodedata.category(ch)[0] != "C"))
+
+
+def print_progress(char):
+    """print a throbber alike that immediately is sent to the console"""
+    # pylint: disable=global-statement
+    global PROGRESS_COUNT
+    print(char, end="")
+    PROGRESS_COUNT += 1
+    if not is_tty and PROGRESS_COUNT % 10 == 0:
+        # add a linebreak so we see something in jenkins (if):
+        print("\n")
+    sys.stdout.flush()
diff --git a/jenkins/helper/async_client.py b/jenkins/helper/async_client.py
new file mode 100644
index 000000000..c9f73669b
--- /dev/null
+++ b/jenkins/helper/async_client.py
@@ -0,0 +1,448 @@
+#!/usr/bin/env python
+""" Run a javascript command by spawning an arangosh
+    to the configured connection """
+
+import os
+from queue import Queue, Empty
+import platform
+import signal
+import sys
+from datetime import datetime, timedelta
+from subprocess import PIPE
+from threading import Thread
+import psutil
+from allure_commons._allure import attach
+
+from tools.asciiprint import print_progress as progress
+# import tools.loghelper as lh
+# pylint: disable=dangerous-default-value
+
+ON_POSIX = "posix" in sys.builtin_module_names
+IS_WINDOWS = platform.win32_ver()[0] != ""
+
+def print_log(string, params):
+    """ only print if thread debug logging is enabled """
+    if params['trace_io']:
+        print(string)
+
+def default_line_result(wait, line, params):
+    """
+    Keep the line, filter it for leading #,
+    if verbose print the line. else print progress.
+    """
+    # pylint: disable=pointless-statement
+    if params['verbose'] and wait > 0 and line is None:
+        progress("sj" + str(wait))
+        return True
+    if isinstance(line, tuple):
+        if params['verbose']:
+            print("e: " + str(line[0], 'utf-8').rstrip())
+        if not line[0].startswith(b"#"):
+            params['output'].append(line[0])
+        else:
+            return False
+    return True
+
+
+def make_default_params(verbose):
+    """ create the structure to work with arrays to output the strings to """
+    return {
+        "trace_io": False,
+        "error": "",
+        "verbose": verbose,
+        "output": [],
+        "identifier": ""
+    }
+
+def make_logfile_params(verbose, logfile, trace):
+    """ create the structure to work with logfiles """
+    return {
+        "trace_io": True,
+        "trace": trace,
+        "error": "",
+        "verbose": verbose,
+        "output": logfile.open('wb'),
+        "identifier": "",
+        "lfn": str(logfile)
+    }
+
+
+def logfile_line_result(wait, line, params):
+    """ Write the line to a logfile, print progress. """
+    # pylint: disable=pointless-statement
+    if params['trace'] and wait > 0 and line is None:
+        progress("sj" + str(wait))
+        return True
+    if isinstance(line, tuple):
+        if params['trace']:
+            print("e: " + str(line[0], 'utf-8').rstrip())
+        params['output'].write(line[0])
+    return True
+
+
+def delete_logfile_params(params):
+    """ teardown the structure to work with logfiles """
+    print(f"{params['identifier']} closing {params['lfn']}")
+    params['output'].flush()
+    params['output'].close()
+    print(f"{params['identifier']} {params['lfn']} closed")
+
+
+def enqueue_stdout(std_out, queue, instance, identifier, params):
+    """add stdout to the specified queue"""
+    try:
+        for line in iter(std_out.readline, b""):
+            # print("O: " + str(line))
+            queue.put((line, instance))
+    except ValueError as ex:
+        print_log(f"{identifier} communication line seems to be closed: {str(ex)}", params)
+    print_log(f"{identifier} x0 done!", params)
+    queue.put(-1)
+    std_out.close()
+
+
+def enqueue_stderr(std_err, queue, instance, identifier, params):
+    """add stderr to the specified queue"""
+    try:
+        for line in iter(std_err.readline, b""):
+            # print("E: " + str(line))
+            queue.put((line, instance))
+    except ValueError as ex:
+        print_log(f"{identifier} communication line seems to be closed: {str(ex)}", params)
+    print_log(f"{identifier} x1 done!", params)
+    queue.put(-1)
+    std_err.close()
+
+
+def convert_result(result_array):
+    """binary -> string"""
+    result = ""
+    for one_line in result_array:
+        if isinstance(one_line, str):
+            result += "\n" + one_line.rstrip()
+        else:
+            result += "\n" + one_line.decode("utf-8").rstrip()
+    return result
+
+def add_message_to_report(params, string):
+    """ add a message from python to the report strings/files + print it """
+    print(string)
+    if isinstance(params['output'], list):
+        params['output'].append(f"{'v'*80}\n{datetime.now()}>>>{string}<<<\n{'^'*80}\n")
+    else:
+        params['output'].write(bytearray(
+            f"{'v'*80}\n{datetime.now()}>>>{string}<<<\n{'^'*80}\n", "utf-8"))
+        params['output'].flush()
+    sys.stdout.flush()
+    return string + '\n'
+
+def kill_children(identifier, params, children):
+    """ slash all processes enlisted in children - if they still exist """
+    err = ""
+    killed = []
+    for one_child in children:
+        if one_child.pid in killed:
+            continue
+        try:
+            killed.append(one_child.pid)
+            err += add_message_to_report(
+                params,
+                f"{identifier}: killing {one_child.name()} - {str(one_child.pid)}")
+            one_child.resume()
+        except FileNotFoundError:
+            pass
+        except AttributeError:
+            pass
+        except ProcessLookupError:
+            pass
+        except psutil.NoSuchProcess:
+            pass
+        except psutil.AccessDenied:
+            pass
+        try:
+            one_child.kill()
+        except psutil.NoSuchProcess:  # pragma: no cover
+            pass
+    print_log(f"{identifier}: Waiting for the children to terminate", params)
+    psutil.wait_procs(children, timeout=20)
+    return err
+
+class CliExecutionException(Exception):
+    """transport CLI error texts"""
+
+    def __init__(self, message, execution_result, have_timeout):
+        super().__init__()
+        self.execution_result = execution_result
+        self.message = message
+        self.have_timeout = have_timeout
+
+def expect_failure(expect_to_fail, ret, params):
+    """ convert results, throw error if wanted """
+    attach(str(ret['rc_exit']), f"Exit code: {str(ret['rc_exit'])} == {expect_to_fail}")
+    res = (None,None,None,None)
+    if ret['have_deadline'] or ret['progressive_timeout']:
+        res = (False, convert_result(params['output']), 0, ret['line_filter'])
+        raise CliExecutionException("Execution failed.",
+                                    res,
+                                    ret['progressive_timeout'] or ret['have_deadline'])
+    if ret['rc_exit'] != 0:
+        res = (False, convert_result(params['output']), 0, ret['line_filter'])
+        if expect_to_fail:
+            return res
+        raise CliExecutionException("Execution failed.", res, False)
+
+    if not expect_to_fail:
+        if len(params['output']) == 0:
+            res = (True, "", 0, ret['line_filter'])
+        else:
+            res = (True, convert_result(params['output']), 0, ret['line_filter'])
+        return res
+
+    if len(params['output']) == 0:
+        res = (True, "", 0, ret['line_filter'], params['error'])
+    else:
+        res = (True, convert_result(params['output']), 0, ret['line_filter'])
+    raise CliExecutionException(
+        f"{params.identifier} Execution was expected to fail, but exited successfully.",
+        res, ret['progressive_timeout'])
+
+ID_COUNTER = 0
+class ArangoCLIprogressiveTimeoutExecutor:
+    """
+    Abstract base class to run arangodb cli tools
+    with username/password/endpoint specification
+    timeout will be relative to the last thing printed.
+    """
+
+    # pylint: disable=too-few-public-methods too-many-arguments disable=too-many-instance-attributes disable=too-many-statements disable=too-many-branches disable=too-many-locals
+    def __init__(self, config, connect_instance, deadline_signal=-1):
+        """launcher class for cli tools"""
+        self.connect_instance = connect_instance
+        self.cfg = config
+        self.deadline_signal = deadline_signal
+        if self.deadline_signal == -1:
+            # pylint: disable=no-member
+            # yes, one is only there on the wintendo, the other one elsewhere.
+            if IS_WINDOWS:
+                self.deadline_signal = signal.CTRL_BREAK_EVENT
+            else:
+                self.deadline_signal = signal.SIGINT
+
+
+    def run_arango_tool_monitored(
+            self,
+            executeable,
+            more_args,
+            use_default_auth=True,
+            params={"error": "", "verbose": True, "output":[]},
+            progressive_timeout=60,
+            deadline=0,
+            deadline_grace_period=180,
+            result_line_handler=default_line_result,
+            expect_to_fail=False,
+            identifier=""
+    ):
+        """
+        runs a script in background tracing with
+        a dynamic timeout that its got output
+        (is still alive...)
+        """
+        # fmt: off
+        passvoid = ''
+        if self.cfg.passvoid:
+            passvoid  = str(self.cfg.passvoid)
+        elif self.connect_instance:
+            passvoid = str(self.connect_instance.get_passvoid())
+        if passvoid is None:
+            passvoid = ''
+
+        run_cmd = [
+            "--log.foreground-tty", "true",
+            "--log.force-direct", "true",
+        ]
+        if self.connect_instance:
+            run_cmd += ["--server.endpoint", self.connect_instance.get_endpoint()]
+            if use_default_auth:
+                run_cmd += ["--server.username", str(self.cfg.username)]
+                run_cmd += ["--server.password", passvoid]
+
+        run_cmd += more_args
+        ret = self.run_monitored(executeable,
+                                 run_cmd,
+                                 params,
+                                 progressive_timeout,
+                                 deadline,
+                                 deadline_grace_period,
+                                 result_line_handler,
+                                 identifier)
+        return expect_failure(expect_to_fail, ret, params)
+
+    # fmt: on
+    def run_monitored(self,
+                      executeable,
+                      args,
+                      params={"error": "", "verbose": True, "output":[]},
+                      progressive_timeout=60,
+                      deadline=0,
+                      deadline_grace_period=180,
+                      result_line_handler=default_line_result,
+                      identifier=""
+                      ):
+        """
+        run a script in background tracing with a dynamic timeout that its got output
+        Deadline will represent an absolute timeout at which it will be signalled to
+        exit, and yet another minute later a hard kill including sub processes will
+        follow.
+        (is still alive...)
+        """
+        rc_exit = None
+        line_filter = False
+        run_cmd = [executeable] + args
+        children = []
+        if identifier == "":
+            # pylint: disable=global-statement
+            global ID_COUNTER
+            my_no = ID_COUNTER
+            ID_COUNTER += 1
+            identifier = f"IO_{str(my_no)}"
+        print(params)
+        params['identifier'] = identifier
+        if not isinstance(deadline, datetime):
+            if deadline == 0:
+                deadline = datetime.now() + timedelta(seconds=progressive_timeout * 10)
+            else:
+                deadline = datetime.now() + timedelta(seconds=deadline)
+        final_deadline = deadline + timedelta(seconds=deadline_grace_period)
+        print(f"{identifier}: launching {str(run_cmd)}")
+        with psutil.Popen(
+            run_cmd,
+            stdout=PIPE,
+            stderr=PIPE,
+            close_fds=ON_POSIX,
+            cwd=self.cfg.test_data_dir.resolve(),
+        ) as process:
+            queue = Queue()
+            thread1 = Thread(
+                name=f"readIO {identifier}",
+                target=enqueue_stdout,
+                args=(process.stdout, queue, self.connect_instance, identifier, params),
+            )
+            thread2 = Thread(
+                name="readErrIO {identifier}",
+                target=enqueue_stderr,
+                args=(process.stderr, queue, self.connect_instance, identifier, params),
+            )
+            thread1.start()
+            thread2.start()
+
+            try:
+                print(
+                    "{0} me PID:{1} launched PID:{2} with LWPID:{3} and LWPID:{4}".format(
+                        identifier,
+                        str(os.getpid()),
+                        str(process.pid),
+                        str(thread1.native_id),
+                        str(thread2.native_id))
+                )
+            except AttributeError:
+                print(
+                    "{0} me PID:{1} launched PID:{2} with LWPID:N/A and LWPID:N/A".format(
+                        identifier,
+                        str(os.getpid()),
+                        str(process.pid)))
+
+            # read line without blocking
+            have_progressive_timeout = False
+            tcount = 0
+            close_count = 0
+            have_deadline = 0
+            deadline_grace_count = 0
+            while not have_progressive_timeout:
+                # if you want to tail the output, enable this:
+                # out.flush()
+                result_line_handler(tcount, None, params)
+                line = ""
+                try:
+                    line = queue.get(timeout=1)
+                    ret = result_line_handler(0, line, params)
+                    line_filter = line_filter or ret
+                    tcount = 0
+                    if not isinstance(line, tuple):
+                        close_count += 1
+                        print_log(f"{identifier} 1 IO Thead done!", params)
+                        if close_count == 2:
+                            break
+                except Empty:
+                    # print(identifier  + '..' + str(deadline_grace_count))
+                    tcount += 1
+                    have_progressive_timeout = tcount >= progressive_timeout
+                    if have_progressive_timeout:
+                        try:
+                            children = process.children(recursive=True)
+                        except psutil.NoSuchProcess:
+                            pass
+                        process.kill()
+                        kill_children(identifier, params, children)
+                        rc_exit = process.wait()
+                if datetime.now() > deadline:
+                    have_deadline += 1
+                if have_deadline == 1:
+                    add_message_to_report(
+                        params,
+                        f"{identifier} Execution Deadline reached - will trigger signal {self.deadline_signal}!")
+                    # Send the process our break / sigint
+                    try:
+                        children = process.children(recursive=True)
+                    except psutil.NoSuchProcess:
+                        pass
+                    process.send_signal(self.deadline_signal)
+                elif have_deadline > 1 and datetime.now() > final_deadline:
+                    try:
+                        # give it some time to exit:
+                        print_log(f"{identifier} try wait exit:", params)
+                        try:
+                            children = children + process.children(recursive=True)
+                        except psutil.NoSuchProcess:
+                            pass
+                        rc_exit = process.wait(1)
+                        add_message_to_report(params, f"{identifier}  exited: {str(rc_exit)}")
+                        kill_children(identifier, params, children)
+                        print_log(f"{identifier}  closing", params)
+                        process.stderr.close()
+                        process.stdout.close()
+                        break
+                    except psutil.TimeoutExpired:
+                        deadline_grace_count += 1
+                        print_log(f"{identifier} timeout waiting for exit {str(deadline_grace_count)}", params)
+                        # if its not willing, use force:
+                        if deadline_grace_count > deadline_grace_period:
+                            print_log(f"{identifier} getting children", params)
+                            try:
+                                children = process.children(recursive=True)
+                            except psutil.NoSuchProcess:
+                                pass
+                            kill_children(identifier, params, children)
+                            add_message_to_report(params, f"{identifier} killing")
+                            process.kill()
+                            print_log(f"{identifier} waiting", params)
+                            rc_exit = process.wait()
+                            print_log(f"{identifier} closing", params)
+                            process.stderr.close()
+                            process.stdout.close()
+                            break
+            print_log(f"{identifier} IO-Loop done", params)
+            timeout_str = ""
+            if have_progressive_timeout:
+                timeout_str = "TIMEOUT OCCURED!"
+                print(timeout_str)
+                timeout_str += "\n"
+            elif rc_exit is None:
+                print_log(f"{identifier} waiting for regular exit", params)
+                rc_exit = process.wait()
+                print_log(f"{identifier} done", params)
+            kill_children(identifier, params, children)
+            print_log(f"{identifier} joining io Threads", params)
+            thread1.join()
+            thread2.join()
+            print_log(f"{identifier} OK", params)
+
+        return {
+            "progressive_timeout": have_progressive_timeout,
+            "have_deadline": have_deadline,
+            "rc_exit": rc_exit,
+            "line_filter": line_filter,
+        }
diff --git a/jenkins/helper/generate_jenkins_scripts.py b/jenkins/helper/generate_jenkins_scripts.py
deleted file mode 100755
index 252aef634..000000000
--- a/jenkins/helper/generate_jenkins_scripts.py
+++ /dev/null
@@ -1,317 +0,0 @@
-#!/bin/env python3
-""" read test definition, and generate the output for the specified target """
-import argparse
-import sys
-
-#pylint: disable=line-too-long disable=broad-except
-
-# check python 3
-if sys.version_info[0] != 3:
-    print("found python version ", sys.version_info)
-    sys.exit()
-
-
-def generate_fish_output(args, outfile, tests):
-    """ unix/fish conformant test definitions """
-    def output(line):
-        """ output one line """
-        print(line, file=outfile)
-
-    def print_test_func(test, func, varname):
-        """ print one test function """
-        args = " ".join(test["args"])
-        params = test["params"]
-        suffix = params.get("suffix", "-")
-
-        conditions = []
-        if "enterprise" in test["flags"]:
-            conditions.append("isENTERPRISE;")
-        if "ldap" in test["flags"]:
-            conditions.append("hasLDAPHOST;")
-
-        if len(conditions) > 0:
-            conditions_string = " and ".join(conditions) + " and "
-        else:
-            conditions_string = ""
-
-        if "buckets" in params:
-            num_buckets = int(params["buckets"])
-            for i in range(num_buckets):
-                output(
-                    f'{conditions_string}'
-                    f'set {varname} "${varname}""{test["weight"]},{func} \'{test["name"]}\''
-                    f' {i} --testBuckets {num_buckets}/{i} {args}\\n"')
-        else:
-            output(f'{conditions_string}'
-                   f'set {varname} "${varname}""{test["weight"]},{func} \'{test["name"]}\' '
-                   f'{suffix} {args}\\n"')
-
-    def print_all_tests(func, varname):
-        """ iterate over all definitions """
-        for test in tests:
-            print_test_func(test, func, varname)
-
-    if args.cluster:
-        print_all_tests("runClusterTest1", "CT")
-    else:
-        print_all_tests("runSingleTest1", "ST")
-
-
-def generate_ps1_output(args, outfile, tests):
-    """ powershell conformant test definitions """
-    def output(line):
-        """ output one line """
-        print(line, file=outfile)
-
-    for test in tests:
-        params = test["params"]
-        suffix = f' -index "{params["suffix"]}"' if "suffix" in params else ""
-        cluster_str = " -cluster $true" if args.cluster else ""
-        condition_prefix = ""
-        condition_suffix = ""
-        if "enterprise" in test["flags"]:
-            condition_prefix = 'If ($ENTERPRISEEDITION -eq "On") { '
-            condition_suffix = ' }'
-        if "ldap" in test["flags"]:
-            raise Exception("ldap not supported for windows")
-
-        moreargs = ""
-        args_list = test["args"]
-        if len(args_list) > 0:
-            moreargs = f' -moreParams "{" ".join(args_list)}"'
-
-        if "buckets" in params:
-            num_buckets = int(params["buckets"])
-            for i in range(num_buckets):
-                output(f'{condition_prefix}'
-                       f'registerTest -testname "{test["name"]}" -weight {test["wweight"]} '
-                       f'-index "{i}" -bucket "{num_buckets}/{i}"{moreargs}{cluster_str}'
-                       f'{condition_suffix}')
-        else:
-            output(f'{condition_prefix}'
-                   f'registerTest -testname "{test["name"]}"{cluster_str} -weight {test["wweight"]}{suffix}{moreargs}'
-                   f'{condition_suffix}')
-
-
-def filter_tests(args, tests):
-    """ filter testcase by operations target Single/Cluster/full """
-    if args.all:
-        return tests
-
-    filters = []
-    if args.cluster:
-        filters.append(lambda test: "single" not in test["flags"])
-    else:
-        filters.append(lambda test: "cluster" not in test["flags"])
-
-    if args.full:
-        filters.append(lambda test: "!full" not in test["flags"])
-    else:
-        filters.append(lambda test: "full" not in test["flags"])
-
-    if args.format == "ps1":
-        filters.append(lambda test: "!windows" not in test["flags"])
-
-    for one_filter in filters:
-        tests = filter(one_filter, tests)
-    return list(tests)
-
-
-def generate_dump_output(_, outfile, tests):
-    """ interactive version output to inspect comprehension """
-    def output(line):
-        """ output one line """
-        print(line, file=outfile)
-
-    for test in tests:
-        params = " ".join(f"{key}={value}" for key, value in test['params'].items())
-        output(f"{test['name']}")
-        output(f"\tweight: {test['weight']}")
-        output(f"\tweight: {test['wweight']}")
-        output(f"\tflags: {' '.join(test['flags'])}")
-        output(f"\tparams: {params}")
-        output(f"\targs: {' '.join(test['args'])}")
-
-
-formats = {
-    "dump": generate_dump_output,
-    "fish": generate_fish_output,
-    "ps1": generate_ps1_output,
-}
-
-known_flags = {
-    "cluster": "this test requires a cluster",
-    "single": "this test requires a single server",
-    "full": "this test is only executed in full tests",
-    "!full": "this test is only executed in non-full tests",
-    "ldap": "ldap",
-    "enterprise": "this tests is only executed with the enterprise version",
-    "!windows": "test is excluded from ps1 output"
-}
-
-known_parameter = {
-    "buckets": "number of buckets to use for this test",
-    "suffix": "suffix that is appended to the tests folder name",
-    "weight": "weight that controls execution order on Linux / Mac. Lower weights are executed later",
-    "wweight": "windows weight how many resources will the job use in the SUT? Default: 1 in Single server, 4 in Clusters"
-}
-
-
-def print_help_flags():
-    """ print help for flags """
-    print("Flags are specified as a single token.")
-    for flag, exp in known_flags.items():
-        print(f"{flag}: {exp}")
-
-    print("Parameter have a value and specified as param=value.")
-    for flag, exp in known_parameter.items():
-        print(f"{flag}: {exp}")
-
-
-def parse_arguments():
-    """ argv """
-    if "--help-flags" in sys.argv:
-        print_help_flags()
-        sys.exit()
-
-    parser = argparse.ArgumentParser()
-    parser.add_argument("definitions", help="file containing the test definitions", type=str)
-    parser.add_argument("-f", "--format", type=str, choices=formats.keys(), help="which format to output",
-                        default="fish")
-    parser.add_argument("-o", "--output", type=str, help="output file, default is '-', which means stdout", default="-")
-    parser.add_argument("--validate-only", help="validates the test definition file", action="store_true")
-    parser.add_argument("--help-flags", help="prints information about available flags and exits", action="store_true")
-    parser.add_argument("--cluster", help="output only cluster tests instead of single server", action="store_true")
-    parser.add_argument("--full", help="output full test set", action="store_true")
-    parser.add_argument("--all", help="output all test, ignore other filters", action="store_true")
-    args = parser.parse_args()
-
-    return args
-
-
-def validate_params(params, is_cluster):
-    """ check for argument validity """
-    def parse_number(value):
-        """ check value """
-        try:
-            return int(value)
-        except Exception as exc:
-            raise Exception(f"invalid numeric value: {value}") from exc
-
-    def parse_number_or_default(key, default_value=None):
-        """ check number """
-        if key in params:
-            params[key] = parse_number(params[key])
-        elif default_value is not None:
-            params[key] = default_value
-
-    parse_number_or_default("weight", 250)
-    parse_number_or_default("wweight", 4 if is_cluster else 1)
-    parse_number_or_default("buckets")
-
-    return params
-
-
-def validate_flags(flags):
-    """ check whether target flags are valid """
-    if "cluster" in flags and "single" in flags:
-        raise Exception("`cluster` and `single` specified for the same test")
-    if "full" in flags and "!full" in flags:
-        raise Exception("`full` and `!full` specified for the same test")
-
-
-def read_definition_line(line):
-    """ parse one test definition line """
-    bits = line.split()
-    if len(bits) < 1:
-        raise Exception("expected at least one argument: <testname>")
-    name, *remainder = bits
-
-    flags = []
-    params = {}
-    args = []
-
-    for idx, bit in enumerate(remainder):
-        if bit == "--":
-            args = remainder[idx + 1:]
-            break
-
-        if "=" in bit:
-            key, value = bit.split("=", maxsplit=1)
-            params[key] = value
-        else:
-            flags.append(bit)
-
-    # check all flags
-    for flag in flags:
-        if flag not in known_flags:
-            raise Exception(f"Unknown flag `{flag}`")
-
-    # check all params
-    for param in params:
-        if param not in known_parameter:
-            raise Exception(f"Unknown parameter `{param}`")
-
-    validate_flags(flags)
-    params = validate_params(params, 'cluster' in flags)
-
-    return {
-        "name": name,
-        "weight": params["weight"],
-        "wweight": params["wweight"],
-        "flags": flags,
-        "args": args,
-        "params": params
-    }
-
-
-def read_definitions(filename):
-    """ read test definitions txt """
-    tests = []
-    has_error = False
-    with open(filename, "r", encoding="utf-8") as filep:
-        for line_no, line in enumerate(filep):
-            line = line.strip()
-            if line.startswith("#") or len(line) == 0:
-                continue  # ignore comments
-            try:
-                test = read_definition_line(line)
-                tests.append(test)
-            except Exception as exc:
-                print(f"{filename}:{line_no + 1}: {exc}", file=sys.stderr)
-                has_error = True
-    if has_error:
-        raise Exception("abort due to errors")
-    return tests
-
-
-def generate_output(args, outfile, tests):
-    """ generate output """
-    if args.format not in formats:
-        raise Exception(f"Unknown format `{args.format}`")
-    formats[args.format](args, outfile, tests)
-
-
-def get_output_file(args):
-    """ get output file """
-    if args.output == '-':
-        return sys.stdout
-    return open(args.output, "w", encoding="utf-8")
-
-
-def main():
-    """ entrypoint """
-    try:
-        args = parse_arguments()
-        tests = read_definitions(args.definitions)
-        if args.validate_only:
-            return  # nothing left to do
-        tests = filter_tests(args, tests)
-        generate_output(args, get_output_file(args), tests)
-    except Exception as exc:
-        print(exc, file=sys.stderr)
-        sys.exit(1)
-
-
-if __name__ == "__main__":
-    main()
diff --git a/jenkins/helper/monkeypatch_psutil.py b/jenkins/helper/monkeypatch_psutil.py
new file mode 100644
index 000000000..1fe86cece
--- /dev/null
+++ b/jenkins/helper/monkeypatch_psutil.py
@@ -0,0 +1,114 @@
+#!/usr/bin/env python
+"""
+This file is intended to make psutil behave portably on the wintendo, and
+to enable functionality the upstream author doesn't think ready to use.
+"""
+import platform
+import signal
+import subprocess
+import time
+import sys
+
+winver = platform.win32_ver()
+if winver[0]:
+    WINDOWS = True
+    POSIX = False
+    # may throw on elderly wintendos?
+    # this is only here on the wintendo, don't bother me else where.
+    # pylint: disable=no-name-in-module
+    from psutil import Process
+    from psutil import TimeoutExpired
+    from psutil import _psutil_windows as cext
+
+    from psutil._pswindows import WindowsService
+
+    class WindowsServiceMonkey(WindowsService):
+        """
+        We want this code anyway, it's good enough!
+        actions
+        XXX: the necessary C bindings for start() and stop() are
+        implemented but for now I prefer not to expose them.
+        I may change my mind in the future. Reasons:
+        - they require Administrator privileges
+        - can't implement a timeout for stop() (unless by using a thread,
+          which sucks)
+        - would require adding ServiceAlreadyStarted and
+          ServiceAlreadyStopped exceptions, adding two new APIs.
+        - we might also want to have modify(), which would basically mean
+          rewriting win32serviceutil.ChangeServiceConfig, which involves a
+          lot of stuff (and API constants which would pollute the API), see:
+          http://pyxr.sourceforge.net/PyXR/c/python24/lib/site-packages/
+              win32/lib/win32serviceutil.py.html#0175
+        - psutil is typically about "read only" monitoring stuff;
+          win_service_* APIs should only be used to retrieve a service and
+          check whether it's running
+        """
+
+        def start(self, timeout=None):
+            """start a windows service"""
+            with self._wrap_exceptions():
+                cext.winservice_start(self.name())
+                if timeout:
+                    giveup_at = time.time() + timeout
+                    while True:
+                        if self.status() == "running":
+                            return
+                        if time.time() > giveup_at:
+                            raise TimeoutExpired(timeout)
+                        time.sleep(0.1)
+
+        def stop(self):
+            """stop windows service"""
+            # Note: timeout is not implemented because it's just not
+            # possible, see:
+            # http://stackoverflow.com/questions/11973228/
+            with self._wrap_exceptions():
+                return cext.winservice_stop(self.name())
+
+    WindowsService.start = WindowsServiceMonkey.start
+    WindowsService.stop = WindowsServiceMonkey.stop
+
+    class ProcessMonkey(Process):
+        """overload this function"""
+
+        def terminate(self):
+            """Terminate the process with SIGTERM pre-emptively checking
+            whether PID has been reused.
+            On Windows this will only work for processes spawned through psutil
+            or started using
+               kwargs['creationflags'] = subprocess.CREATE_NEW_PROCESS_GROUP.
+            """
+            if POSIX:
+                self._send_signal(signal.SIGTERM)
+            else:  # pragma: no cover
+
+                # def sigint_boomerang_handler(signum, frame):
+                #     """do the right thing to behave like linux does"""
+                #     # pylint: disable=unused-argument
+                #     if signum != signal.SIGINT:
+                #         sys.exit(1)
+                #     # pylint: disable=unnecessary-pass
+                #     pass
+                # 
+                # original_sigint_handler = signal.getsignal(signal.SIGINT)
+                # signal.signal(signal.SIGINT, sigint_boomerang_handler)
+                # # only here on the wintendo:
+                # # pylint: disable=no-member
+                # self.send_signal(signal.CTRL_BREAK_EVENT)
+                self.wait()
+                # restore original handler
+                # signal.signal(signal.SIGINT, original_sigint_handler)
+
+    Process.terminate = ProcessMonkey.terminate
+    # pylint: disable=super-init-not-called disable=consider-using-with
+    class Popen(Process):
+        """overload this function"""
+
+        def __init__(self, *args, **kwargs):
+            kwargs["creationflags"] = subprocess.CREATE_NEW_PROCESS_GROUP
+            self.__subproc = subprocess.Popen(*args, **kwargs)
+            self._init(self.__subproc.pid, _ignore_nsp=True)
+
+    from psutil import Popen as patchme_popen
+
+    patchme_popen.__init__ = Popen.__init__
diff --git a/jenkins/helper/test_launch_controller.py b/jenkins/helper/test_launch_controller.py
new file mode 100755
index 000000000..654162481
--- /dev/null
+++ b/jenkins/helper/test_launch_controller.py
@@ -0,0 +1,997 @@
+#!/usr/bin/env python3
+""" read the test definitions, and launch and control the test executions """
+import argparse
+from datetime import datetime, timedelta
+import os
+from pathlib import Path
+import platform
+import pprint
+import signal
+import sys
+from threading import Thread, Lock
+import time
+from traceback import print_exc
+import shutil
+import psutil
+
+from async_client import (
+    ArangoCLIprogressiveTimeoutExecutor,
+    make_logfile_params,
+    logfile_line_result,
+    delete_logfile_params
+)
+
+ZIPFORMAT="gztar"
+try:
+    import py7zr
+    shutil.register_archive_format('7zip', py7zr.pack_7zarchive, description='7zip archive')
+    ZIPFORMAT="7zip"
+except ModuleNotFoundError:
+    pass
+
+# check python 3
+if sys.version_info[0] != 3:
+    print("found unsupported python version ", sys.version_info)
+    sys.exit(1)
+
+
+IS_WINDOWS = platform.win32_ver()[0] != ""
+IS_MAC = platform.mac_ver()[0] != ""
+
+pp = pprint.PrettyPrinter(indent=4)
+
+all_tests = []
+#pylint: disable=line-too-long disable=broad-except
+
+def sigint_boomerang_handler(signum, frame):
+    """do the right thing to behave like linux does"""
+    # pylint: disable=unused-argument
+    if signum != signal.SIGINT:
+        sys.exit(1)
+    # pylint: disable=unnecessary-pass
+    pass
+
+if IS_WINDOWS:
+    original_sigint_handler = signal.getsignal(signal.SIGINT)
+    signal.signal(signal.SIGINT, sigint_boomerang_handler)
+    # pylint: disable=unused-import
+    # this will patch psutil for us:
+    import monkeypatch_psutil
+
+def get_workspace():
+    """ evaluates the directory to put reports to """
+    if 'INNERWORKDIR' in os.environ:
+        workdir = Path(os.environ['INNERWORKDIR'])
+        if workdir.exists():
+            return workdir
+    if 'WORKDIR' in os.environ:
+        workdir = Path(os.environ['WORKDIR'])
+        if workdir.exists():
+            return workdir
+    #if 'WORKSPACE' in os.environ:
+    #    workdir = Path(os.environ['WORKSPACE'])
+    #    if workdir.exists():
+    #        return workdir
+    return Path.cwd() / 'work'
+
+TEMP = Path("/tmp/")
+if 'TEMP' in os.environ:
+    TEMP = Path(os.environ['TEMP'])
+if 'INNERWORKDIR' in os.environ:
+    TEMP = Path(os.environ['INNERWORKDIR'])
+    wd = TEMP / 'ArangoDB'
+    os.chdir(wd)  # assumed intent: work from within the ArangoDB source tree (wd.cwd() was a no-op)
+    TEMP = TEMP / 'tmp'
+else:
+    TEMP = TEMP / 'ArangoDB'
+if not TEMP.exists():
+    TEMP.mkdir(parents=True)
+os.environ['TMPDIR'] = str(TEMP)
+os.environ['TEMP'] = str(TEMP)
+
+def list_all_processes():
+    """list all processes for later reference"""
+    print("PID  Process")
+    # pylint: disable=catching-non-exception
+    for process in psutil.process_iter(["pid", "name"]):
+        cmdline = process.name
+        try:
+            cmdline = str(process.cmdline())
+            if cmdline == "[]":
+                cmdline = "[" + process.name() + "]"
+        except psutil.AccessDenied:
+            pass
+        except psutil.ProcessLookupError:
+            pass
+        except psutil.NoSuchProcess:
+            pass
+        print(f"{process.pid} {cmdline}")
+    sys.stdout.flush()
+
+class ArangoshExecutor(ArangoCLIprogressiveTimeoutExecutor):
+    """configuration"""
+
+    def __init__(self, site_config, slot_lock):
+        self.slot_lock = slot_lock
+        self.read_only = False
+        super().__init__(site_config, None)
+
+    def run_testing(self,
+                    testcase,
+                    testing_args,
+                    timeout,
+                    directory,
+                    logfile,
+                    identifier,
+                    verbose
+                    ):
+        # pylint: disable=R0913 disable=R0902
+        """ testing.js wrapper """
+        print('------')
+        print(testing_args)
+        args = [
+            '-c', str(self.cfg.cfgdir / 'arangosh.conf'),
+            "--log.foreground-tty", "true",
+            "--log.force-direct", "true",
+            '--log.level', 'warning',
+            "--log.level", "v8=debug",
+            '--server.endpoint', 'none',
+            '--javascript.allow-external-process-control', 'true',
+            '--javascript.execute', self.cfg.base_path / 'UnitTests' / 'unittest.js',
+            ]
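+        # everything after '--' is handed through to UnitTests/unittest.js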
+        run_cmd = args + [
+            '--',
+            testcase,
+            '--testOutput', directory ] + testing_args
+        params = make_logfile_params(verbose, logfile, self.cfg.trace)
+        ret = self.run_monitored(
+            self.cfg.bin_dir / "arangosh",
+            run_cmd,
+            params=params,
+            progressive_timeout=timeout,
+            deadline=self.cfg.deadline,
+            result_line_handler=logfile_line_result,
+            identifier=identifier
+        )
+        delete_logfile_params(params)
+        ret['error'] = params['error']
+        return ret
+
+TEST_LOG_FILES = []
+
+class TestConfig():
+    """ setup of one test """
+    # pylint: disable=too-many-instance-attributes disable=too-many-arguments
+    # pylint: disable=too-many-branches disable=too-many-statements
+    # pylint: disable=too-few-public-methods
+    def __init__(self,
+                 cfg,
+                 name,
+                 suite,
+                 args,
+                 priority,
+                 parallelity,
+                 flags):
+        """ defaults for test config """
+        self.parallelity = parallelity
+        self.launch_delay = 1.3
+        self.progressive_timeout = 100
+        self.priority = priority
+        self.suite = suite
+        self.name = name
+        self.name_enum = name
+        self.crashed = False
+        self.success = True
+        self.structured_results = ""
+        self.summary = ""
+        self.start = None
+        self.finish = None
+        self.delta_seconds = 0
+        self.delta = None
+
+        self.base_logdir = cfg.test_report_dir / self.name
+        if not self.base_logdir.exists():
+            self.base_logdir.mkdir()
+        self.log_file = cfg.run_root / f'{self.name}.log'
+        # pylint: disable=global-variable-not-assigned
+        global TEST_LOG_FILES
+        if str(self.log_file) in TEST_LOG_FILES:
+            raise Exception(f'duplicate testfile {str(self.log_file)}')
+        TEST_LOG_FILES.append(str(self.log_file))
+        self.summary_file = self.base_logdir / 'testfailures.txt'
+        self.crashed_file = self.base_logdir / 'UNITTEST_RESULT_CRASHED.json'
+        self.success_file = self.base_logdir / 'UNITTEST_RESULT_EXECUTIVE_SUMMARY.json'
+        self.report_file =  self.base_logdir / 'UNITTEST_RESULT.json'
+        self.base_testdir = cfg.test_data_dir_x / self.name
+
+        self.args = []
+        for param in args:
+            if param.startswith('$'):
+                paramname = param[1:].upper()
+                if paramname in os.environ:
+                    self.args += os.environ[paramname].split(' ')
+                else:
+                    print("Error: failed to expand environment variable: '" + param + "' for '" + self.name + "'")
+            else:
+                self.args.append(param)
+        self.args += ['--coreCheck', 'true', '--disableMonitor', 'true', '--writeXmlReport', 'true']
+
+
+        if 'filter' in os.environ:
+            self.args += ['--test', os.environ['filter']]
+        if 'sniff' in flags:
+            if IS_WINDOWS:
+                self.args += ['--sniff', 'true',
+                             '--sniffProgram',  os.environ['TSHARK'],
+                             '--sniffDevice', os.environ['DUMPDEVICE']]
+            else:
+                self.args += ['--sniff', 'sudo']
+
+        if 'SKIPNONDETERMINISTIC' in os.environ:
+            self.args += ['--skipNondeterministic', os.environ['SKIPNONDETERMINISTIC']]
+        if 'SKIPTIMECRITICAL' in os.environ:
+            self.args += ['--skipTimeCritical', os.environ['SKIPTIMECRITICAL']]
+
+        if 'BUILDMODE' in os.environ:
+            self.args += [ '--buildType',  os.environ['BUILDMODE'] ]
+
+        if 'DUMPAGENCYONERROR' in os.environ:
+            self.args += [ '--dumpAgencyOnError', os.environ['DUMPAGENCYONERROR']]
+
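+        # book a block of 100 ports per suite, starting at PORTBASE (default 7000)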
+        myport = cfg.portbase
+        cfg.portbase += 100
+        self.args += [ '--minPort', str(myport), '--maxPort', str(myport + 99)]
+        if 'SKIPGREY' in os.environ:
+            self.args += [ '--skipGrey', os.environ['SKIPGREY']]
+        if 'ONLYGREY' in os.environ:
+            self.args += [ '--onlyGrey', os.environ['ONLYGREY']]
+
+        if 'vst' in flags:
+            self.args += [ '--vst', 'true']
+        if 'ssl' in flags:
+            self.args += [ '--protocol', 'ssl']
+        if 'http2' in flags:
+            self.args += [ '--http2', 'true']
+        if 'encrypt' in flags:
+            self.args += [ '--encryptionAtRest', 'true']
+
+    def __repr__(self):
+        return f"""
+{self.name} => {self.parallelity}, {self.priority}, {self.success} -- {' '.join(self.args)}"""
+
+    def print_test_log_line(self):
+        """ get visible representation """
+        # pylint: disable=consider-using-f-string
+        resultstr = "Good result in"
+        if not self.success:
+            resultstr = "Bad result in"
+        if self.crashed:
+            resultstr = "Crash occured in"
+        return """
+{1} {0.name} => {0.parallelity}, {0.priority}, {0.success} -- {2}""".format(
+            self,
+            resultstr,
+            ' '.join(self.args))
+
+    def print_testruns_line(self):
+        """ get visible representation """
+        # pylint: disable=consider-using-f-string
+        resultstr = "GOOD"
+        if not self.success:
+            resultstr = "BAD"
+        if self.crashed:
+            resultstr = "CRASH"
+        return """
+<tr><td>{0.name}</td><td align="right">{0.delta}</td><td align="right">{1}</td></tr>""".format(
+            self,
+            resultstr)
+
+def get_priority(test_config):
+    """ sorter function to return the priority """
+    return test_config.priority
+
+class SiteConfig:
+    """ this environment - adapted to oskar defaults """
+    # pylint: disable=too-few-public-methods disable=too-many-instance-attributes
+    def __init__(self, definition_file):
+        print(os.environ)
+        self.trace = False
+        self.timeout = 1800
+        if 'TIMELIMIT' in os.environ:
+            self.timeout = int(os.environ['TIMELIMIT'])
+        if psutil.cpu_count() <= 8:
+            print("Small machine detected, trippling deadline!")
+            self.timeout *= 3
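+        # soft deadline: stop launching new suites; hard deadline (660s later): kill stragglers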
+        self.deadline = datetime.now() + timedelta(seconds=self.timeout)
+        self.hard_deadline = datetime.now() + timedelta(seconds=self.timeout + 660)
+        if definition_file.is_file():
+            definition_file = definition_file.parent
+        base_source_dir = (definition_file / '..').resolve()
+        bin_dir = (base_source_dir / 'build' / 'bin').resolve()
+        if IS_WINDOWS:
+            for target in ['RelWithdebInfo', 'Debug']:
+                if (bin_dir / target).exists():
+                    bin_dir = bin_dir / target
+
+        self.cfgdir = base_source_dir / 'etc' / 'relative'
+        self.bin_dir = bin_dir
+        self.base_path = base_source_dir
+        self.test_data_dir = base_source_dir
+        self.passvoid = ''
+        self.run_root = base_source_dir / 'testrun'
+        if self.run_root.exists():
+            shutil.rmtree(self.run_root)
+        self.test_data_dir_x = self.run_root / 'run'
+        self.test_data_dir_x.mkdir(parents=True)
+        self.test_report_dir = self.run_root / 'report'
+        self.test_report_dir.mkdir(parents=True)
+        self.portbase = 7000
+        if 'PORTBASE' in os.environ:
+            self.portbase = int(os.environ['PORTBASE'])
+
+
+def testing_runner(testing_instance, this, arangosh):
+    """ operate one makedata instance """
+    this.start = datetime.now(tz=None)
+    ret = arangosh.run_testing(this.suite,
+                               this.args,
+                               999999999,
+                               this.base_logdir,
+                               this.log_file,
+                               this.name_enum,
+                               True) #verbose?
+    this.success = (
+        not ret["progressive_timeout"] or
+        not ret["have_deadline"] or
+        ret["rc_exit"] == 0
+    )
+    this.finish = datetime.now(tz=None)
+    this.delta = this.finish - this.start
+    this.delta_seconds = this.delta.total_seconds()
+    print(f'done with {this.name_enum}')
+    this.crashed = not this.crashed_file.exists() or this.crashed_file.read_text() == "true"
+    this.success = this.success and this.success_file.exists() and this.success_file.read_text() == "true"
+    if this.report_file.exists():
+        this.structured_results = this.report_file.read_text(encoding="UTF-8", errors='ignore')
+    this.summary = ret['error']
+    if this.summary_file.exists():
+        this.summary += this.summary_file.read_text()
+    with arangosh.slot_lock:
+        testing_instance.running_suites.remove(this.name_enum)
+
+    if this.crashed or not this.success:
+        print(str(this.log_file.name))
+        print(this.log_file.parent / ("FAIL_" + str(this.log_file.name))
+              )
+        failname = this.log_file.parent / ("FAIL_" + str(this.log_file.name))
+        this.log_file.rename(failname)
+        this.log_file = failname
+        if (this.summary == "" and failname.stat().st_size < 1024*10):
+            print("pulling undersized test output into testfailures.txt")
+            this.summary = failname.read_text(encoding='utf-8')
+        with arangosh.slot_lock:
+            if this.crashed:
+                testing_instance.crashed = True
+            testing_instance.success = False
+    testing_instance.done_job(this.parallelity)
+
+def get_socket_count():
+    """ get the number of sockets lingering destruction """
+    counter = 0
+    for socket in psutil.net_connections(kind='inet'):
+        if socket.status in [
+                psutil.CONN_FIN_WAIT1,
+                psutil.CONN_FIN_WAIT1,
+                psutil.CONN_CLOSE_WAIT]:
+            counter += 1
+    return counter
+
+class TestingRunner():
+    """ manages test runners, creates report """
+    # pylint: disable=too-many-instance-attributes
+    def __init__(self, cfg):
+        self.cfg = cfg
+        self.deadline_reached = False
+        self.slot_lock = Lock()
+        self.no_threads = psutil.cpu_count()
+        self.available_slots = round(self.no_threads * 2)  # two slots per logical core
+        if IS_WINDOWS:
+            self.max_load = 0.85
+            self.max_load1 = 0.75
+        else:
+            self.max_load = self.no_threads * 0.9
+            self.max_load1 = self.no_threads * 0.9
+        # self.available_slots += (psutil.cpu_count(logical=True) - self.available_slots) / 2
+        self.used_slots = 0
+        self.scenarios = []
+        self.arangosh = ArangoshExecutor(self.cfg, self.slot_lock)
+        self.workers = []
+        self.running_suites = []
+        self.success = True
+        self.crashed = False
+        self.cluster = False
+
+    def print_active(self):
+        """ output currently active testsuites """
+        with self.slot_lock:
+            print("Running: " + str(self.running_suites) +
+                  " => Active Slots: " + str(self.used_slots) +
+                  " => Load: " + str(psutil.getloadavg()))
+        sys.stdout.flush()
+
+    def done_job(self, parallelity):
+        """ if one job is finished... """
+        with self.slot_lock:
+            self.used_slots -= parallelity
+
+    def launch_next(self, offset, counter):
+        """ launch one testing job """
+        if self.scenarios[offset].parallelity > (self.available_slots - self.used_slots):
+            return False
+        try:
+            sock_count = get_socket_count()
+            if sock_count > 8000:
+                print(f"Socket count: {sock_count}, waiting before spawning more")
+                return False
+        except psutil.AccessDenied:
+            pass
+        load = psutil.getloadavg()
+        if ((load[0] > self.max_load) or
+            (load[1] > self.max_load1)):
+            print(F"Load to high: {str(load)} waiting before spawning more")
+            return False
+        with self.slot_lock:
+            self.used_slots += self.scenarios[offset].parallelity
+        this = self.scenarios[offset]
+        this.name_enum = f"{this.name} {str(counter)}"
+        print(f"launching {this.name_enum}")
+        pp.pprint(this)
+
+        with self.slot_lock:
+            self.running_suites.append(this.name_enum)
+
+        worker = Thread(target=testing_runner,
+                        args=(self,
+                              this,
+                              self.arangosh))
+        worker.name = this.name
+        worker.start()
+        self.workers.append(worker)
+        return True
+
+    def handle_deadline(self):
+        """ here we make sure no worker thread is stuck during its extraordinary shutdown """
+        # wait until the hard deadline for threads to clean up their stuff, else we consider them blocked
+        more_running = True
+        mica = None
+        print(f"Main: {str(datetime.now())} soft deadline reached: {str(self.cfg.deadline)} now waiting for hard deadline {str(self.cfg.hard_deadline)}")
+        while ((datetime.now() < self.cfg.hard_deadline) and more_running):
+            time.sleep(1)
+            with self.slot_lock:
+                more_running = self.used_slots != 0
+        if more_running:
+            print("Main: reaching hard Time limit!")
+            list_all_processes()
+            mica = os.getpid()
+            myself = psutil.Process(mica)
+            children = myself.children(recursive=True)
+            for one_child in children:
+                if one_child.pid != mica:
+                    try:
+                        print(f"Main: killing {one_child.name()} - {str(one_child.pid)}")
+                        one_child.resume()
+                    except psutil.NoSuchProcess:
+                        pass
+                    except psutil.AccessDenied:
+                        pass
+                    try:
+                        one_child.kill()
+                    except psutil.NoSuchProcess:  # pragma: no cover
+                        pass
+            print("Main: waiting for the children to terminate")
+            psutil.wait_procs(children, timeout=20)
+            print("Main: giving workers 20 more seconds to exit.")
+            time.sleep(60)
+            with self.slot_lock:
+                more_running = self.used_slots != 0
+        else:
+            print("Main: workers terminated on time")
+        if more_running:
+            print("Main: force-terminates the python process due to overall unresponsiveness! Geronimoooo!")
+            list_all_processes()
+            sys.stdout.flush()
+            self.success = False
+            if IS_WINDOWS:
+                # pylint: disable=protected-access
+                # we want to exit without waiting for threads:
+                os._exit(4)
+            else:
+                os.kill(mica, signal.SIGKILL)
+                sys.exit(4)
+
+    def testing_runner(self):
+        """ run testing suites """
+        mem = psutil.virtual_memory()
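+        # advertise only (80% of total RAM) / 9 to each arangod, since many run in parallel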
+        os.environ['ARANGODB_OVERRIDE_DETECTED_TOTAL_MEMORY'] = str(int((mem.total * 0.8) / 9))
+
+        start_offset = 0
+        used_slots = 0
+        counter = 0
+        if len(self.scenarios) == 0:
+            raise Exception("no valid scenarios loaded")
+        some_scenario = self.scenarios[0]
+        if not some_scenario.base_logdir.exists():
+            some_scenario.base_logdir.mkdir()
+        if not some_scenario.base_testdir.exists():
+            some_scenario.base_testdir.mkdir()
+        print(self.cfg.deadline)
+        if datetime.now() > self.cfg.deadline:
+            raise ValueError("test already timed out before it started?")
+        print(f"Main: Starting {str(datetime.now())} soft deadline will be: {str(self.cfg.deadline)} hard deadline will be: {str(self.cfg.hard_deadline)}")
+        while (datetime.now() < self.cfg.deadline) and (start_offset < len(self.scenarios) or used_slots > 0):
+            used_slots = 0
+            with self.slot_lock:
+                used_slots = self.used_slots
+            if self.available_slots > used_slots and start_offset < len(self.scenarios):
+                print(f"Launching more: {self.available_slots} > {used_slots} {counter}")
+                sys.stdout.flush()
+                if self.launch_next(start_offset, counter):
+                    start_offset += 1
+                    time.sleep(5)
+                    counter += 1
+                    self.print_active()
+                else:
+                    if used_slots == 0 and start_offset >= len(self.scenarios):
+                        print("done")
+                        break
+                    self.print_active()
+                    time.sleep(5)
+            else:
+                self.print_active()
+                time.sleep(5)
+        self.deadline_reached = datetime.now() > self.cfg.deadline
+        if self.deadline_reached:
+            self.handle_deadline()
+        for worker in self.workers:
+            if self.deadline_reached:
+                print("Deadline: Joining threads of " + worker.name)
+            worker.join()
+        if self.success:
+            for scenario in self.scenarios:
+                if not scenario.success:
+                    self.success = False
+
+    def generate_report_txt(self):
+        """ create the summary testfailures.txt from all bits """
+        print(self.scenarios)
+        summary = ""
+        if self.deadline_reached:
+            summary = "Deadline reached during test execution!\n"
+        for testrun in self.scenarios:
+            print(testrun)
+            if testrun.crashed or not testrun.success:
+                summary += testrun.summary
+        print(summary)
+        (get_workspace() / 'testfailures.txt').write_text(summary)
+
+    def cleanup_unneeded_binary_files(self):
+        """ delete all files not needed for the crashreport binaries """
+        shutil.rmtree(str(self.cfg.bin_dir / 'tzdata'), ignore_errors=True)
+        needed = [
+            'arangod',
+            'arangosh',
+            'arangodump',
+            'arangorestore',
+            'arangoimport',
+            'arangobackup',
+            'arangodbtests']
+        for one_file in self.cfg.bin_dir.iterdir():
+            if (one_file.suffix == '.lib' or
+                (one_file.stem not in needed) ):
+                print(f'Deleting {str(one_file)}')
+                one_file.unlink(missing_ok=True)
+
+    def generate_crash_report(self):
+        """ crash report zips """
+        core_max_count = 4 # single server crashdumps...
+        if self.cluster:
+            core_max_count = 15 # 3 cluster instances
+        core_dir = Path.cwd()
+        core_pattern = "core*"
+        if 'COREDIR' in os.environ:
+            core_dir = Path(os.environ['COREDIR'])
+        if IS_MAC:
+            core_dir = Path('/cores')
+        if IS_WINDOWS:
+            core_pattern = "*.dmp"
+        files = sorted(core_dir.glob(core_pattern))
+        if len(files) > core_max_count:
+            count = 0
+            for one_crash_file in files:
+                count += 1
+                if count > core_max_count:
+                    print(f'{core_max_count} cores reached, will not archive {one_crash_file}')
+                    one_crash_file.unlink(missing_ok=True)
+        is_empty = len(files) == 0
+        if self.crashed or not is_empty:
+            crash_report_file = get_workspace() / datetime.now(tz=None).strftime("crashreport-%d-%b-%YT%H.%M.%SZ")
+            print("creating crashreport: " + str(crash_report_file))
+            sys.stdout.flush()
+            shutil.make_archive(str(crash_report_file),
+                                ZIPFORMAT,
+                                (core_dir / '..').resolve(),
+                                core_dir.name,
+                                True)
+            self.cleanup_unneeded_binary_files()
+            binary_report_file = get_workspace() / datetime.now(tz=None).strftime("binaries-%d-%b-%YT%H.%M.%SZ")
+            print("creating crashreport binary support zip: " + str(binary_report_file))
+            sys.stdout.flush()
+            shutil.make_archive(str(binary_report_file),
+                                ZIPFORMAT,
+                                (self.cfg.bin_dir / '..').resolve(),
+                                self.cfg.bin_dir.name,
+                                True)
+            for corefile in core_dir.glob(core_pattern):
+                print("Deleting corefile " + str(corefile))
+                sys.stdout.flush()
+                corefile.unlink()
+
+    def generate_test_report(self):
+        """ regular testresults zip """
+        tarfile = get_workspace() / datetime.now(tz=None).strftime("testreport-%d-%b-%YT%H.%M.%SZ")
+        print("Creating " + str(tarfile))
+        sys.stdout.flush()
+        shutil.make_archive(self.cfg.run_root / 'innerlogs',
+                            ZIPFORMAT,
+                            (TEMP / '..').resolve(),
+                            TEMP.name)
+
+        shutil.rmtree(TEMP, ignore_errors=False)
+        shutil.make_archive(tarfile,
+                            ZIPFORMAT,
+                            self.cfg.run_root,
+                            '.',
+                            True)
+        shutil.rmtree(self.cfg.run_root, ignore_errors=False)
+
+    def create_log_file(self):
+        """ create the log file with the stati """
+        logfile = get_workspace() / 'test.log'
+        with open(logfile, "w", encoding="utf-8") as filep:
+            for one_scenario in self.scenarios:
+                filep.write(one_scenario.print_test_log_line())
+
+    def create_testruns_file(self):
+        """ create the log file with the stati """
+        logfile = get_workspace() / 'testRuns.html'
+        state = 'GOOD'
+        if not self.success:
+            state  = 'BAD'
+        if self.crashed:
+            state = 'CRASHED'
+        with open(logfile, "w", encoding="utf-8") as filep:
+            filep.write('''
+<table>
+<tr><th>Test</th><th>Runtime</th><th>Status</th></tr>
+''')
+            total = 0
+            for one_scenario in self.scenarios:
+                filep.write(one_scenario.print_testruns_line())
+                total += one_scenario.delta_seconds
+            filep.write(f'''
+<tr style="background-color: red;color: white;"><td>TOTAL</td><td align="right"></td><td align="right">{state}</td></tr>
+</table>
+''')
+
+    def register_test_func(self, cluster, test):
+        """ print one test function """
+        args = test["args"]
+        params = test["params"]
+        suffix = params.get("suffix", "")
+        name = test["name"]
+        if suffix:
+            name += f"_{suffix}"
+
+        if test["wweight"] :
+            parallelity = test["wweight"]
+        if 'single' in test['flags'] and cluster:
+            return
+        if 'cluster' in test['flags'] and not cluster:
+            return
+        if cluster:
+            self.cluster = True
+            if parallelity == 1:
+                parallelity = 4
+            args += ['--cluster', 'true',
+                     '--dumpAgencyOnError', 'true']
+        if "enterprise" in test["flags"]:
+            return
+        if "ldap" in test["flags"] and not 'LDAPHOST' in os.environ:
+            return
+
+        if "buckets" in params:
+            num_buckets = int(params["buckets"])
+            for i in range(num_buckets):
+                self.scenarios.append(
+                    TestConfig(self.cfg,
+                               name + f"_{i}",
+                               test["name"],
+                               [ *args,
+                                 '--index', f"{i}",
+                                 '--testBuckets', f'{num_buckets}/{i}'],
+                               test['weight'],
+                               parallelity,
+                               test['flags']))
+        else:
+            self.scenarios.append(
+                TestConfig(self.cfg,
+                           name,
+                           test["name"],
+                           [ *args],
+                           test['weight'],
+                           parallelity,
+                           test['flags']))
+
+    def sort_by_priority(self):
+        """ sort the tests by their priority for the excecution """
+        self.scenarios.sort(key=get_priority, reverse=True)
+
+    def print_and_exit_closing_stance(self):
+        """ our finaly good buye stance. """
+        print("\n" + "SUCCESS" if self.success else "FAILED")
+        retval = 0
+        if not self.success:
+            retval = 1
+        if self.crashed:
+            retval = 2
+        sys.exit(retval)
+
+def launch(args, tests):
+    """ Manage test execution on our own """
+    runner = TestingRunner(SiteConfig(Path(args.definitions).resolve()))
+    for test in tests:
+        runner.register_test_func(args.cluster, test)
+    runner.sort_by_priority()
+    print(runner.scenarios)
+    try:
+        runner.testing_runner()
+        runner.generate_report_txt()
+        runner.generate_crash_report()
+        runner.generate_test_report()
+    except Exception as exc:
+        print()
+        sys.stderr.flush()
+        sys.stdout.flush()
+        print(exc, file=sys.stderr)
+        print_exc()
+    finally:
+        sys.stderr.flush()
+        sys.stdout.flush()
+        runner.create_log_file()
+        runner.create_testruns_file()
+        runner.print_and_exit_closing_stance()
+
+def filter_tests(args, tests):
+    """ filter testcase by operations target Single/Cluster/full """
+    if args.all:
+        return tests
+
+    filters = []
+    if args.cluster:
+        filters.append(lambda test: "single" not in test["flags"])
+    else:
+        filters.append(lambda test: "cluster" not in test["flags"])
+
+    if args.full:
+        filters.append(lambda test: "!full" not in test["flags"])
+    else:
+        filters.append(lambda test: "full" not in test["flags"])
+
+    if args.gtest:
+        filters.append(lambda test: "gtest" ==  test["name"])
+
+    if args.format == "ps1" or IS_WINDOWS:
+        filters.append(lambda test: "!windows" not in test["flags"])
+
+    for one_filter in filters:
+        tests = filter(one_filter, tests)
+    return list(tests)
+
+
+def generate_dump_output(_, tests):
+    """ interactive version output to inspect comprehension """
+    def output(line):
+        """ output one line """
+        print(line)
+
+    for test in tests:
+        params = " ".join(f"{key}={value}" for key, value in test['params'].items())
+        output(f"{test['name']}")
+        output(f"\tweight: {test['weight']}")
+        output(f"\tweight: {test['wweight']}")
+        output(f"\tflags: {' '.join(test['flags'])}")
+        output(f"\tparams: {params}")
+        output(f"\targs: {' '.join(test['args'])}")
+
+
+formats = {
+    "dump": generate_dump_output,
+    "launch": launch,
+}
+
+known_flags = {
+    "cluster": "this test requires a cluster",
+    "single": "this test requires a single server",
+    "full": "this test is only executed in full tests",
+    "!full": "this test is only executed in non-full tests",
+    "gtest": "only the gtest are to be executed",
+    "ldap": "ldap",
+    "enterprise": "this tests is only executed with the enterprise version",
+    "!windows": "test is excluded from ps1 output"
+}
+
+known_parameter = {
+    "buckets": "number of buckets to use for this test",
+    "suffix": "suffix that is appended to the tests folder name",
+    "weight": "weight that controls execution order on Linux / Mac. Lower weights are executed later",
+    "wweight": "windows weight how many resources will the job use in the SUT? Default: 1 in Single server, 4 in Clusters"
+}
+
+
+def print_help_flags():
+    """ print help for flags """
+    print("Flags are specified as a single token.")
+    for flag, exp in known_flags.items():
+        print(f"{flag}: {exp}")
+
+    print("Parameter have a value and specified as param=value.")
+    for flag, exp in known_parameter.items():
+        print(f"{flag}: {exp}")
+
+
+def parse_arguments():
+    """ argv """
+    if "--help-flags" in sys.argv:
+        print_help_flags()
+        sys.exit()
+
+    parser = argparse.ArgumentParser()
+    parser.add_argument("definitions", help="file containing the test definitions", type=str)
+    parser.add_argument("-f", "--format", type=str, choices=formats.keys(), help="which format to output",
+                        default="launch")
+    parser.add_argument("--validate-only", help="validates the test definition file", action="store_true")
+    parser.add_argument("--help-flags", help="prints information about available flags and exits", action="store_true")
+    parser.add_argument("--cluster", help="output only cluster tests instead of single server", action="store_true")
+    parser.add_argument("--full", help="output full test set", action="store_true")
+    parser.add_argument("--gtest", help="only runt gtest", action="store_true")
+    parser.add_argument("--all", help="output all test, ignore other filters", action="store_true")
+    args = parser.parse_args()
+
+    return args
+
+
+def validate_params(params, is_cluster):
+    """ check for argument validity """
+    def parse_number(value):
+        """ check value """
+        try:
+            return int(value)
+        except Exception as exc:
+            raise Exception(f"invalid numeric value: {value}") from exc
+
+    def parse_number_or_default(key, default_value=None):
+        """ check number """
+        if key in params:
+            if params[key][0] == '*': # factor the default
+                if default_value is None:
+                    raise Exception(f"parameter '{key}' has no default to multiply")
+                params[key] = default_value * parse_number(params[key][1:])
+            else:
+                params[key] = parse_number(params[key])
+        elif default_value is not None:
+            params[key] = default_value
+
+    parse_number_or_default("weight", 250)
+    parse_number_or_default("wweight", 4 if is_cluster else 1)
+    parse_number_or_default("buckets")
+
+    return params
+
+
+def validate_flags(flags):
+    """ check whether target flags are valid """
+    if "cluster" in flags and "single" in flags:
+        raise Exception("`cluster` and `single` specified for the same test")
+    if "full" in flags and "!full" in flags:
+        raise Exception("`full` and `!full` specified for the same test")
+
+
+def read_definition_line(line):
+    """ parse one test definition line """
+    bits = line.split()
+    if len(bits) < 1:
+        raise Exception("expected at least one argument: <testname>")
+    name, *remainder = bits
+
+    flags = []
+    params = {}
+    args = []
+
+    for idx, bit in enumerate(remainder):
+        if bit == "--":
+            args = remainder[idx + 1:]
+            break
+
+        if "=" in bit:
+            key, value = bit.split("=", maxsplit=1)
+            params[key] = value
+        else:
+            flags.append(bit)
+
+    # check all flags
+    for flag in flags:
+        if flag not in known_flags:
+            raise Exception(f"Unknown flag `{flag}`")
+
+    # check all params
+    for param in params:
+        if param not in known_parameter:
+            raise Exception(f"Unknown parameter `{param}`")
+
+    validate_flags(flags)
+    params = validate_params(params, 'cluster' in flags)
+
+    return {
+        "name": name,
+        "weight": params["weight"],
+        "wweight": params["wweight"],
+        "flags": flags,
+        "args": args,
+        "params": params
+    }
+
+
+def read_definitions(filename):
+    """ read test definitions txt """
+    tests = []
+    has_error = False
+    with open(filename, "r", encoding="utf-8") as filep:
+        for line_no, line in enumerate(filep):
+            line = line.strip()
+            if line.startswith("#") or len(line) == 0:
+                continue  # ignore comments
+            try:
+                test = read_definition_line(line)
+                tests.append(test)
+            except Exception as exc:
+                print(f"{filename}:{line_no + 1}: {exc}", file=sys.stderr)
+                has_error = True
+    if has_error:
+        raise Exception("abort due to errors")
+    return tests
+
+
+def generate_output(args, tests):
+    """ generate output """
+    if args.format not in formats:
+        raise Exception(f"Unknown format `{args.format}`")
+    formats[args.format](args, tests)
+
+def main():
+    """ entrypoint """
+    try:
+        args = parse_arguments()
+        tests = read_definitions(args.definitions)
+        if args.validate_only:
+            return  # nothing left to do
+        tests = filter_tests(args, tests)
+        generate_output(args, tests)
+    except Exception as exc:
+        print(exc, file=sys.stderr)
+        print_exc()
+        sys.exit(1)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/jenkins/helper/tools/__pycache__/asciiprint.cpython-310.pyc b/jenkins/helper/tools/__pycache__/asciiprint.cpython-310.pyc
new file mode 100644
index 000000000..4b37d80be
Binary files /dev/null and b/jenkins/helper/tools/__pycache__/asciiprint.cpython-310.pyc differ
diff --git a/jenkins/helper/tools/asciiprint.py b/jenkins/helper/tools/asciiprint.py
new file mode 100644
index 000000000..85f37267a
--- /dev/null
+++ b/jenkins/helper/tools/asciiprint.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env python3
+""" removes terminal control sequences and other non ascii characters """
+import unicodedata
+import re
+import sys
+
+is_tty = sys.stdout.isatty()
+PROGRESS_COUNT = 0
+
+# ANSI escape sequences, UTF-8 braille throbber glyphs and carriage returns
+ANSI_ESCAPE_B = re.compile(
+    rb"""
+    \xE2\xA0[\x80-\xBF]  # UTF-8 braille throbber glyphs (e.g. \xE2\xA0\xA7, \xE2\xA0\x8F)
+    |
+    \r    # carriage return
+    |
+    \x1B  # ESC
+    (?:   # 7-bit C1 Fe (except CSI)
+        [@-Z\\-_]
+    |     # or [ for CSI, followed by a control sequence
+        \[
+        [0-?]*  # Parameter bytes
+        [ -/]*  # Intermediate bytes
+        [@-~]   # Final byte
+    )
+""",
+    re.VERBOSE,
+)
+
+
+def ascii_convert(the_bytes: bytes):
+    """convert string to only be ascii without control sequences"""
+    return ANSI_ESCAPE_B.sub(rb"", the_bytes).decode("utf-8")
+
+
+# ANSI escape sequences, braille throbber glyphs and carriage returns
+ANSI_ESCAPE = re.compile(
+    r"""
+    [\u2800-\u28FF]  # braille throbber glyphs
+    |
+    \r    # carriage return
+    |
+    \x1B  # ESC
+    (?:   # 7-bit C1 Fe (except CSI)
+        [@-Z\\-_]
+    |     # or [ for CSI, followed by a control sequence
+        \[
+        [0-?]*  # Parameter bytes
+        [ -/]*  # Intermediate bytes
+        [@-~]   # Final byte
+    )
+""",
+    re.VERBOSE,
+)
+
+
+def ascii_convert_str(the_str: str):
+    """convert string to only be ascii without control sequences"""
+    return ANSI_ESCAPE.sub("", the_str)
+
+
+def ascii_print(string):
+    """convert string to only be ascii without control sequences"""
+    string = ANSI_ESCAPE.sub("", string)
+    print("".join(ch for ch in string if ch == "\n" or unicodedata.category(ch)[0] != "C"))
+
+
+def print_progress(char):
+    """print a throbber alike that immediately is sent to the console"""
+    # pylint: disable=global-statement
+    global PROGRESS_COUNT
+    print(char, end="")
+    PROGRESS_COUNT += 1
+    if not is_tty and PROGRESS_COUNT % 10 == 0:
+        # add a linebreak so the non-tty jenkins log still shows progress:
+        print("\n")
+    sys.stdout.flush()
diff --git a/jenkins/runCatchTest.ps1 b/jenkins/runCatchTest.ps1
index bb1a75c46..a97b6ffcb 100644
--- a/jenkins/runCatchTest.ps1
+++ b/jenkins/runCatchTest.ps1
@@ -6,7 +6,7 @@ Copy-Item -Force "$env:WORKSPACE\jenkins\helper\prepareOskar.ps1" $pwd
 skipPackagingOn
 staticExecutablesOn
 setAllLogsToWorkspace
-catchtest
+gtest
 releaseMode
 
 switchBranches $env:ARANGODB_BRANCH $env:ENTERPRISE_BRANCH
diff --git a/scripts/lib/Utils.psm1 b/scripts/lib/Utils.psm1
index 631886877..973a5448a 100644
--- a/scripts/lib/Utils.psm1
+++ b/scripts/lib/Utils.psm1
@@ -16,105 +16,6 @@ Function log([array]$log)
     comm
 }
 
-Function createReport
-{
-    $date = $(Get-Date).ToUniversalTime().ToString("yyyy-MM-ddTHH.mm.ssZ")
-    $date | Add-Content "$env:TMP\testProtocol.txt"
-    $global:badtests = $null
-    new-item $env:TMP\oskar-junit-report -itemtype directory
-    ForEach ($dir in (Get-ChildItem -Path $env:TMP  -Directory -Filter "*.out"))
-    {
-        $reportFound = $false
-        If ($(Get-ChildItem -filter "*.xml" -path $dir.FullName | Measure-Object | Select -ExpandProperty Count) -gt 0) {
-          Copy-Item -Path "$($dir.FullName)\*.xml" $env:TMP\oskar-junit-report
-        }
-        Write-Host "Looking at directory $($dir.BaseName)"
-        If(Test-Path -PathType Leaf -Path "$($dir.FullName)\UNITTEST_RESULT_EXECUTIVE_SUMMARY.json")
-            {
-                        $reportFound = $true
-                        If(-Not($(Get-Content "$($dir.FullName)\UNITTEST_RESULT_EXECUTIVE_SUMMARY.json") -eq "true"))
-                        {
-                            $global:result = "BAD"
-                            $file = $($dir.BaseName).Substring(0,$($dir.BaseName).Length-4)+".stdout.log"
-                            Write-Host "Bad result in $file"
-                            "Bad result in $file" | Add-Content "$env:TMP\testProtocol.txt"
-                            $global:badtests = $global:badtests + "Bad result in $file`r`n"
-                        }
-            }
-        If(Test-Path -PathType Leaf -Path "$($dir.FullName)\UNITTEST_RESULT_CRASHED.json")
-            {
-                        $reportFound = $true
-                        If(-Not($(Get-Content "$($dir.FullName)\UNITTEST_RESULT_CRASHED.json") -eq "false"))
-                        {
-                            $global:result = "BAD"
-                            $file = $($dir.BaseName).Substring(0,$($dir.BaseName).Length-4)+".stdout.log"
-                            Write-Host "Crash occured in $file"
-                            $global:hasTestCrashes = $True
-                            "Crash occured in $file" | Add-Content "$env:TMP\testProtocol.txt"
-                            $global:badtests = $global:badtests + "Crash occured in $file`r`n"
-                        }
-            }
-        If ($reportFound -ne $true)
-            {
-                Write-Host "No Testresult found at directory $($dir.BaseName)"
-                $global:result = "BAD"
-                "No Testresult found at directory $($dir.BaseName)" | Add-Content "$env:TMP\testProtocol.txt"
-                $global:badtests = $global:badtests + "No Testresult found at directory $($dir.BaseName)`r`n"   
-            }
-    }
-    $global:result | Add-Content "$env:TMP\testProtocol.txt"
-    If($global:ENABLE_REPORT_DUMPS -eq "on" -and (Get-ChildItem -Path "$global:COREDIR" -Filter "arango*.dmp" -Recurse -ErrorAction Continue -Force))
-    {
-        Write-Host "7zip -Path "$global:ARANGODIR\build\bin\$BUILDMODE\arango*.exe "-DestinationPath "$INNERWORKDIR\crashreport-$date.7z
-        7zip -Path "$global:ARANGODIR\build\bin\$BUILDMODE\arango*.exe" -DestinationPath "$INNERWORKDIR\crashreport-$date.7z"
-        ForEach ($core in (Get-ChildItem -Path "$global:COREDIR" -Filter "arango*.dmp" -Recurse -ErrorAction SilentlyContinue))
-        {
-            Write-Host "7zip -Path $($core.FullName) -DestinationPath `"$INNERWORKDIR\crashreport-$date.7z`""   
-            7zip -Path $($core.FullName) -DestinationPath "$INNERWORKDIR\crashreport-$date.7z"
-            Write-Host "Remove-Item $($core.FullName)"
-            Remove-Item $($core.FullName)
-        }
-        ForEach ($pdb in (Get-ChildItem -Path "$global:ARANGODIR\build\bin\$BUILDMODE\" -Filter "arango*.pdb" -Recurse -ErrorAction SilentlyContinue))
-        {
-            Write-Host "7zip -Path $($pdb.FullName) -DestinationPath `"$INNERWORKDIR\crashreport-$date.7z`""
-            7zip -Path $($pdb.FullName) -DestinationPath "$INNERWORKDIR\crashreport-$date.7z"
-        }
-    }
-    If(Test-Path -PathType Leaf -Path "$global:ARANGODIR\innerlogs.7z")
-    {
-        Remove-Item -Force "$global:ARANGODIR\innerlogs.7z"
-    }
-    Write-Host "7zip -Path `"$env:TMP\`" -DestinationPath `"$global:ARANGODIR\innerlogs.7z`""
-    7zip -Path "$env:TMP\" -DestinationPath "$global:ARANGODIR\innerlogs.7z"
-    ForEach ($log in $(Get-ChildItem -Path $global:ARANGODIR -Filter "*.log"))
-    {
-        Write-Host "7zip -Path $($log.FullName)  -DestinationPath `"$INNERWORKDIR\testreport-$date.7z`""
-        7zip -Path $($log.FullName) -DestinationPath "$INNERWORKDIR\testreport-$date.7z"
-    }
-    ForEach ($archive in $(Get-ChildItem -Path $global:ARANGODIR -Filter "*.7z"))
-    {
-        Write-Host "7zip -Path $($archive.FullName) -DestinationPath `"$INNERWORKDIR\testreport-$date.7z`""
-        7zip -Path $($archive.FullName) -DestinationPath "$INNERWORKDIR\testreport-$date.7z"
-    }
-    Write-Host "7zip -Path $env:TMP\testProtocol.txt -DestinationPath `"$INNERWORKDIR\testreport-$date.7z`""
-    7zip -Path "$env:TMP\testProtocol.txt" -DestinationPath "$INNERWORKDIR\testreport-$date.7z"
-
-    log "$date $TESTSUITE $global:result M:$MAINTAINER $BUILDMODE E:$ENTERPRISEEDITION $STORAGEENGINE",$global:repoState,$global:repoStateEnterprise,$badtests
-    If(Test-Path -PathType Leaf -Path "$INNERWORKDIR\testfailures.txt")
-    {
-        Remove-Item -Force "$INNERWORKDIR\testfailures.txt"
-    }
-
-    If($global:result -eq "BAD" -Or $global:hasTestCrashes)
-    {
-        $global:oskarErrorMessage | Add-Content "$INNERWORKDIR\testfailures.txt"
-        ForEach ($file in (Get-ChildItem -Path $env:TMP -Filter "testfailures.txt" -Recurse).FullName)
-        {
-            Get-Content $file | Add-Content "$INNERWORKDIR\testfailures.txt"; comm
-        }
-    }
-}
-
 ################################################################################
 # Test main control
 ################################################################################
@@ -151,9 +52,9 @@ Function runTests
             registerSingleTests
             Break
         }
-        "catchtest"
+        "gtest"
         {
-            registerTest -testname "catch"
+            registerTest -testname "gtest"
             Break
         }
         "resilience"
@@ -174,25 +75,6 @@ Function runTests
             Break
         }
     }
-
-    If ($global:result -eq "GOOD" -And $global:ok)
-    {
-        LaunchController $global:TESTSUITE_TIMEOUT
-        createReport
-    }
-    Else
-    {
-        $global:result = "BAD"
-    }
-
-    If($global:result -eq "GOOD")
-    {
-        Set-Variable -Name "ok" -Value $true -Scope global
-    }
-    Else
-    {
-        Set-Variable -Name "ok" -Value $false -Scope global
-    }
 }
 
 Function waitForTimeWaitSockets() {
@@ -206,117 +88,6 @@ Function waitForTimeWaitSockets() {
     } while ($TimeWait -gt 2500)
 }
 
-Function launchTest($which) {
-    waitForTimeWaitSockets
-    Push-Location $pwd
-    Set-Location $global:ARANGODIR; comm
-    $arangosh = "$global:ARANGODIR\build\bin\$BUILDMODE\arangosh.exe"
-    $test = $global:launcheableTests[$which]
-    Write-Host "Test: " $test['testname'] " - " $test['identifier']
-    Write-Host "Time: $((Get-Date).ToUniversalTime().ToString('yyyy-MM-ddTHH.mm.ssZ'))"
-    Write-Host $arangosh " --- " $test['commandline']
-    Write-Host "-RedirectStandardOutput " $test['StandardOutput']
-    Write-Host "-RedirectStandardError " $test['StandardError']
-
-    $process = $(Start-Process -FilePath "$arangosh" -ArgumentList $test['commandline'] -RedirectStandardOutput $test['StandardOutput'] -RedirectStandardError $test['StandardError'] -PassThru)
-
-    $global:launcheableTests[$which]['pid'] = $process.Id
-    $global:launcheableTests[$which]['running'] = $true
-    $global:launcheableTests[$which]['launchDate'] = $((Get-Date).ToUniversalTime().ToString('yyyy-MM-ddTHH.mm.ssZ'))
-    If(-not($process.ExitCode -eq $null))
-    {
-        Write-Host "Error: Launching Test"
-        $process | Format-List -Property *
-    }
-
-    $str=$($test | where {($_.Name -ne "commandline")} | Out-String)
-    Write-Host $str
-    $global:launcheableTests[$which]['process'] = $process
-    Pop-Location
-}
-
-Function registerTest($testname, $index, $bucket, $filter, $moreParams, $cluster, $weight, $sniff, [switch]$vst, [switch]$http2,[switch]$encrypt,[switch]$ssl)
-{
-    $testWeight = 1
-    $testparams = ""
-    $dumpAgencyOnError = ""
-
-    $output = $testname.replace("*", "all")
-    If ($index) {
-      $output = $output+"_$index"
-    }
-    If ($filter) {
-       $testparams = $testparams+" --test $filter"
-    }
-    If ($bucket) {
-        $testparams = $testparams+" --testBuckets $bucket"
-    }
-    If ($cluster -eq $true)
-    {
-        $testWeight = 4
-        $cluster = "true"
-        $dumpAgencyOnError = "true"
-    }
-    Else
-    {
-        $cluster = "false"
-        $dumpAgencyOnError = "false"
-    }
-    If ($testname -eq "agency")
-    {
-        $dumpAgencyOnError = "true"
-    }
-    If ($weight) {
-      $testWeight = $weight
-    }
-
-    If ($vst) {
-      $testparams = $testparams + " --vst true"
-    }
-
-    If ($ssl) {
-      $testparams = $testparams + " --protocol ssl"
-    }
-
-    If ($http2) {
-      $testparams = $testparams + " --http2 true"
-    }
-
-    If ($encrypt) {
-      $testparams = $testparams + " --encryptionAtRest true"
-    }
-
-    If ($sniff) {
-      $testparams = $testparams + " --sniff true --sniffProgram `"$global:TSHARK`" --sniffDevice $global:dumpDevice"
-    }
-
-    $testparams = $testparams + " --cluster $cluster --coreCheck true --storageEngine $STORAGEENGINE --minPort $global:portBase --maxPort $($global:portBase + 99) --skipNondeterministic $global:SKIPNONDETERMINISTIC --skipTimeCritical $global:SKIPTIMECRITICAL --writeXmlReport true --skipGrey $global:SKIPGREY --dumpAgencyOnError $dumpAgencyOnError --onlyGrey $global:ONLYGREY --buildType $BUILDMODE --disableMonitor true"
-
-    New-Item -Path "$env:TMP\$output.out" -ItemType Directory
-    $testparams = $testparams + " --testOutput $env:TMP\$output.out"
-    $testparams = $testparams + " " + $moreParams
-    If (-Not ([string]::IsNullOrEmpty($global:RUBY))) {
-      $testparams = $testparams + " --ruby " + $global:RUBY
-    }
-
-    $PORT = Get-Random -Minimum 20000 -Maximum 65535
-    $i = $global:testCount
-    $global:testCount = $global:testCount+1
-    $global:launcheableTests += @{
-      running=$false;
-      weight=$testWeight;
-    testname=$testname;
-    identifier=$output;
-      commandline=" -c $global:ARANGODIR\etc\relative\arangosh.conf --log.level warning --server.endpoint tcp://127.0.0.1:$PORT --javascript.execute $global:ARANGODIR\UnitTests\unittest.js -- $testname $testparams";
-      StandardOutput="$global:ARANGODIR\$output.stdout.log";
-      StandardError="$global:ARANGODIR\$output.stderr.log";
-      pid=-1;
-    }
-    $global:maxTestCount = $global:maxTestCount+1
-
-    $global:portBase = $($global:portBase + 100)
-    comm
-}
 
 Function StopProcessWithChildren ($PidToKill, $WaitTimeout)
 {
@@ -334,66 +105,3 @@ Function StopProcessWithChildren ($PidToKill, $WaitTimeout)
     }
 }
 
-Function LaunchController($seconds)
-{
-    $timeSlept = 0;
-    $nextLauncheableTest = 0
-    $currentScore = 0
-    $currentRunning = 1
-    $maxLauncheableTests = $global:launcheableTests.Length
-    $numberTestsSlots = [math]::Round($global:numberSlots * 0.9) # Should leave 10% of slots free for $global:numberSlots > 4
-    While (($seconds -gt 0) -and (($currentRunning -gt 0) -or ($nextLauncheableTest -lt $maxLauncheableTests))) {
-        while (($currentScore -lt $numberTestsSlots) -and ($nextLauncheableTest -lt $global:maxTestCount)) {
-            Write-Host "Launching $nextLauncheableTest '" $global:launcheableTests[$nextLauncheableTest ]['identifier'] "'"
-            launchTest $nextLauncheableTest
-            $currentScore = $currentScore+$global:launcheableTests[$nextLauncheableTest ]['weight']
-            Start-Sleep 20
-            $seconds = $seconds - 20
-            $nextLauncheableTest = $nextLauncheableTest+1
-        }
-        $currentRunning = 0
-        $currentRunningNames = @()
-        ForEach ($test in $global:launcheableTests) {
-            If ($test['running']) {
-                If ($test['process'].HasExited) {
-                    $currentScore = $currentScore - $test['weight']
-                    Write-Host "$((Get-Date).ToUniversalTime().ToString('yyyy-MM-ddTHH.mm.ssZ')) Testrun finished: "$test['identifier'] $test['launchdate']
-                    $str=$($test | where {($_.Name -ne "commandline")} | Out-String)
-                    $test['running'] = $false
-                }
-                Else {
-                    $currentRunningNames += $test['identifier']
-                    $currentRunning = $currentRunning+1
-                }
-            }
-        }
-        Start-Sleep 5
-        $a = $currentRunningNames -join ","
-        Write-Host "$((Get-Date).ToUniversalTime().ToString('yyyy-MM-ddTHH.mm.ssZ')) - Waiting  - "$seconds" - Running Tests: "$a
-        $seconds = $seconds - 5
-    }
-    If ($seconds -lt 1) {
-      Write-Host "tests timeout reached. Current state of worker jobs:"
-    }
-    Else {
-      Write-Host "tests done. Current state of worker jobs:"
-    }
-    $str=$global:launcheableTests | Out-String
-    Write-Host $str
-
-    Get-WmiObject win32_process | Out-File -filepath $env:TMP\processes-before.txt
-    Write-Host "$((Get-Date).ToUniversalTime().ToString('yyyy-MM-ddTHH.mm.ssZ')) we have $currentRunning test(s) that timed out! Currently running processes:"
-    $SessionId = [System.Diagnostics.Process]::GetCurrentProcess().SessionId
-    ForEach ($test in $global:launcheableTests) {
-        If ($test['pid'] -gt 0 -And $test['running'] -And (Get-Process -Id $test['pid'] -ErrorAction SilentlyContinue)) {
-            $global:oskarErrorMessage = $global:oskarErrorMessage + "Oskar is killing this test due to timeout: " + $test['testname'] + "`n"
-            Write-Host "Testrun timeout:"
-            $str = $($test | where {($_.Name -ne "commandline")} | Out-String)
-            Write-Host $str
-            StopProcessWithChildren $test['pid'] 3
-            $global:result = "BAD"
-        }
-    }
-    Get-WmiObject win32_process | Out-File -filepath $env:TMP\processes-after.txt 
-    comm
-}
diff --git a/scripts/runFullTests.fish b/scripts/runFullTests.fish
index 1af7f625b..9de436b1f 100755
--- a/scripts/runFullTests.fish
+++ b/scripts/runFullTests.fish
@@ -8,68 +8,32 @@ set -xg ADDITIONAL_OPTIONS $argv
 ## Single tests: runtime,command
 ################################################################################
 
-set -l ST
-echo "Using test definitions from arangodb repo"
-python3 "$WORKSPACE/jenkins/helper/generate_jenkins_scripts.py" "$INNERWORKDIR/ArangoDB/tests/test-definitions.txt" -f fish --full | source
-
-set -g STS (echo -e $ST | fgrep , | sort -rn | awk -F, '{print $2}')
-set -g STL (count $STS)
-
 function launchSingleTests
-  set -g launchCount (math $launchCount + 1)
-
-  if test $launchCount -gt $STL
-    return 0
-  end
-
-  set -l test $STS[$launchCount]
-
-  if test -n "$TEST"
-    if echo $test | fgrep -q "$TEST"
-      echo "Running test '$test' (contains '$TEST')"
-    else
-      echo "Skipping test '$test' (does not contain '$TEST')"
-      return 1
-    end
-  end
-
-  eval $test
-  return 1
+  echo "Using test definitions from arangodb repo"
+  python3 "$WORKSPACE/jenkins/helper/test_launch_controller.py" "$INNERWORKDIR/ArangoDB/tests/test-definitions.txt" -f launch --full
+  and set -xg result "GOOD"
+  or set -xg result "BAD"
 end
 
 ################################################################################
 ## Catch tests
 ################################################################################
 
-function launchCatchTest
-  switch $launchCount
-    case  0 ; runCatchTest1 catch -
-    case '*' ; return 0
-  end
-  set -g launchCount (math $launchCount + 1)
-  return 1
+function launchGTest
+  python3 "$WORKSPACE/jenkins/helper/test_launch_controller.py" "$INNERWORKDIR/ArangoDB/tests/test-definitions.txt" -f launch --gtest
+  and set -xg result "GOOD"
+  or set -xg result "BAD"
 end
 
 ################################################################################
 ## Cluster tests: runtime,command
 ################################################################################
 
-set -l CT
-echo "Using test definitions from arangodb repo"
-python3 "$WORKSPACE/jenkins/helper/generate_jenkins_scripts.py" "$INNERWORKDIR/ArangoDB/tests/test-definitions.txt" -f fish --full --cluster | source
-
-set -g CTS (echo -e $CT | fgrep , | sort -rn | awk -F, '{print $2}')
-set -g CTL (count $CTS)
-
 function launchClusterTests
-  set -g launchCount (math $launchCount + 1)
-
-  if test $launchCount -gt $CTL
-    return 0
-  end
-
-  eval $CTS[$launchCount]
-  return 1
+  echo "Using test definitions from arangodb repo"
+  python3 "$WORKSPACE/jenkins/helper/test_launch_controller.py" "$INNERWORKDIR/ArangoDB/tests/test-definitions.txt" -f launch --cluster --full
+  and set -xg result "GOOD"
+  or set -xg result "BAD"
 end
 
 ################################################################################
@@ -97,19 +61,27 @@ set -g suiteRunner ""
 switch $TESTSUITE
   case "cluster"
     resetLaunch 4
-    set timeLimit 16200
+    set -xg timeLimit 16200
     set suiteRunner "launchClusterTests"
   case "single"
     resetLaunch 1
-    set timeLimit 9000
+    set -xg timeLimit 9000
     set suiteRunner "launchSingleTests"
+  case "gtest"
+    resetLaunch 1
+    set -xg timeLimit 1800
+    set suiteRunner "launchGTest"
   case "catchtest"
     resetLaunch 1
-    set timeLimit 1800
-    set suiteRunner "launchCatchTest"
+    set -xg timeLimit 1800
+    set suiteRunner "launchGTest"
+  case "resilience"
+    resetLaunch 4
+    set -xg timeLimit 3600
+    set suiteRunner "launchResilienceTests"
   case "resilience"
     resetLaunch 4
-    set timeLimit 10800
+    set -xg timeLimit 10800
     set suiteRunner "launchResilienceTests"
   case "*"
     echo Unknown test suite $TESTSUITE
@@ -130,11 +102,9 @@ if test "$SAN" = "On"
   end
 end
 
-set evalCmd "waitOrKill $timeLimit $suiteRunner"
-eval $evalCmd
-set timeout $status
+eval "$suiteRunner"
 
-createReport
+echo "RESULT: $result"
 
 if test $result = GOOD -a $timeout = 0
   exit 0
diff --git a/scripts/runFullTests.ps1 b/scripts/runFullTests.ps1
index 728dc24b2..79045f60e 100644
--- a/scripts/runFullTests.ps1
+++ b/scripts/runFullTests.ps1
@@ -10,26 +10,10 @@ Function global:registerSingleTests()
 
     Write-Host "Registering tests..."
 
-    $global:TESTSUITE_TIMEOUT = 9000
+    $env:TIMELIMIT = 9000
     Write-Host "Using test definitions from repo..."
-    Try
-    {
-        $out = python "$env:WORKSPACE\jenkins\helper\generate_jenkins_scripts.py" "$INNERWORKDIR\ArangoDB\tests\test-definitions.txt" -f ps1 --full
-        If ($LASTEXITCODE -eq 0)
-        {
-            echo $out | Invoke-Expression -ErrorAction Stop
-        }
-        Else
-        {
-            throw "$out"
-        }
-        Set-Variable -Name "ok" -Value $true -Scope global
-    }
-    Catch
-    {
-        Write-Host "Error: $_"
-        Set-Variable -Name "ok" -Value $false -Scope global
-    }
+    pip install py7zr
+    proc -process "python.exe" -argument "$env:WORKSPACE\jenkins\helper\test_launch_controller.py $INNERWORKDIR\ArangoDB\tests\test-definitions.txt -f launch --full" -logfile $false -priority "Normal"
 }
 
 Function global:registerClusterTests()
@@ -37,26 +22,10 @@ Function global:registerClusterTests()
     noteStartAndRepoState
     Write-Host "Registering tests..."
 
-    $global:TESTSUITE_TIMEOUT = 18000
+    $env:TIMELIMIT = 16200
     Write-Host "Using test definitions from repo..."
-    Try
-    {
-        $out = python "$env:WORKSPACE\jenkins\helper\generate_jenkins_scripts.py" "$INNERWORKDIR\ArangoDB\tests\test-definitions.txt" -f ps1 --full --cluster
-        If ($LASTEXITCODE -eq 0)
-        {
-            echo $out | Invoke-Expression -ErrorAction Stop
-        }
-        Else
-        {
-            throw "$out"
-        }
-        Set-Variable -Name "ok" -Value $true -Scope global
-    }
-    Catch
-    {
-        Write-Host "Error: $_"
-        Set-Variable -Name "ok" -Value $false -Scope global
-    }
+    pip install py7zr
+    proc -process "python.exe" -argument "$env:WORKSPACE\jenkins\helper\test_launch_controller.py $INNERWORKDIR\ArangoDB\tests\test-definitions.txt -f launch --full --cluster" -logfile $false -priority "Normal"
 }
 
 runTests
diff --git a/scripts/runOneTest.fish b/scripts/runOneTest.fish
index 222fc695a..52f51436b 100755
--- a/scripts/runOneTest.fish
+++ b/scripts/runOneTest.fish
@@ -35,10 +35,10 @@ switch $TESTSUITE
     and runSingleTest1 $TEST - $argv[3..-1]
     and waitOrKill 120 ""
     createReport
-  case "catchtest"
+  case "gtest"
     resetLaunch 1
-    and echo "Running $TEST in $TESTUITE with args '$argv[3..-1]'"
+    and echo "Running $TEST in $TESTSUITE with args '$argv[3..-1]'"
-    and runCatchTest1 catch - $argv[3..-1]
+    and runGTest1 gtest - $argv[3..-1]
     and waitOrKill 120 ""
     createReport
   case "*"
diff --git a/scripts/runTests.fish b/scripts/runTests.fish
index 1670f8e6c..f87a34892 100755
--- a/scripts/runTests.fish
+++ b/scripts/runTests.fish
@@ -6,70 +6,36 @@ source $SCRIPTS/lib/tests.fish
 ## Single tests: runtime,command
 ################################################################################
 
-set -l ST
-echo "Using test definitions from arangodb repo"
-python3 "$WORKSPACE/jenkins/helper/generate_jenkins_scripts.py" "$INNERWORKDIR/ArangoDB/tests/test-definitions.txt" -f fish | source
-
-set -g STS (echo -e $ST | fgrep , | sort -rn | awk -F, '{print $2}')
-set -g STL (count $STS)
-echo $STS
-
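+# run all single-server suites through the launch controller; it schedules the
+# testing.js invocations itself, and its exit status sets $result below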
 function launchSingleTests
-  set -g launchCount (math $launchCount + 1)
-
-  if test $launchCount -gt $STL
-    return 0
-  end
-
-  set -l test $STS[$launchCount]
-
-  if test -n "$TEST"
-    if echo $test | fgrep -q "$TEST"
-      echo "Running test '$test' (contains '$TEST')"
-    else
-      echo "Skipping test '$test' (does not contain '$TEST')"
-      return 1
-    end
-  end
-
-  eval $test
-  return 1
+  echo "Using test definitions from arangodb repo"
+  python3 "$WORKSPACE/jenkins/helper/test_launch_controller.py" "$INNERWORKDIR/ArangoDB/tests/test-definitions.txt" -f launch
+  and set -xg result "GOOD"
+  or set -xg result "BAD"
 end
 
 ################################################################################
 ## Catch tests
 ################################################################################
 
-function launchCatchTest
-  switch $launchCount
-    case  0 ; runCatchTest1 catch -
-    case '*' ; return 0
-  end
-  set -g launchCount (math $launchCount + 1)
-  return 1
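+# run only the suites flagged as gtest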
+function launchGTest
+  python3 "$WORKSPACE/jenkins/helper/test_launch_controller.py" "$INNERWORKDIR/ArangoDB/tests/test-definitions.txt" -f launch --gtest
+  and set -xg result "GOOD"
+  or set -xg result "BAD"
 end
 
 ################################################################################
 ## Cluster tests: runtime,command
 ################################################################################
 
-set -l CT
-echo "Using test definitions from arangodb repo"
-python3 "$WORKSPACE/jenkins/helper/generate_jenkins_scripts.py" "$INNERWORKDIR/ArangoDB/tests/test-definitions.txt" -f fish --cluster | source
-  
-set -g CTS (echo -e $CT | fgrep , | sort -rn | awk -F, '{print $2}')
-set -g CTL (count $CTS)
-echo $CTS
-
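+# run the suites flagged as cluster through the launch controller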
 function launchClusterTests
-  set -g launchCount (math $launchCount + 1)
-
-  if test $launchCount -gt $CTL
-    return 0
-  end
-
-  eval $CTS[$launchCount]
-  return 1
+  echo "Using test definitions from arangodb repo"
+  python3 "$WORKSPACE/jenkins/helper/test_launch_controller.py" "$INNERWORKDIR/ArangoDB/tests/test-definitions.txt" -f launch --cluster
+  and set -xg result "GOOD"
+  or set -xg result "BAD"
 end
 
 ################################################################################
@@ -97,19 +63,24 @@ set -g suiteRunner ""
 switch $TESTSUITE
   case "cluster"
     resetLaunch 4
-    set timeLimit 4200
+    set -xg timeLimit 4200
     set suiteRunner "launchClusterTests"
   case "single"
     resetLaunch 1
-    set timeLimit 3900
+    set -xg timeLimit 3900
     set suiteRunner "launchSingleTests"
+  case "gtest"
+    resetLaunch 1
+    set -xg timeLimit 1800
+    set suiteRunner "launchGTest"
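+  # "catchtest" is kept below as an alias for the gtest suite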
   case "catchtest"
     resetLaunch 1
-    set timeLimit 1800
-    set suiteRunner "launchCatchTest"
+    set -xg timeLimit 1800
+    set suiteRunner "launchGTest"
   case "resilience"
     resetLaunch 4
-    set timeLimit 3600
+    set -xg timeLimit 3600
     set suiteRunner "launchResilienceTests"
   case "*"
     echo Unknown test suite $TESTSUITE
@@ -130,16 +101,12 @@ if test "$SAN" = "On"
   end
 end
 
-set evalCmd "waitOrKill $timeLimit $suiteRunner"
-eval $evalCmd
-set timeout $status
-
-createReport
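+# the launch controller is expected to enforce the execution deadline (TIMELIMIT) itself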
+eval "$suiteRunner"
 
 echo "RESULT: $result"
-echo "TIMEOUT: $timeout"
 
-if test $result = GOOD -a $timeout = 0
+if test $result = GOOD
   exit 0
 else
   exit 1
diff --git a/scripts/runTests.ps1 b/scripts/runTests.ps1
index f4e6a683a..97843cf1f 100644
--- a/scripts/runTests.ps1
+++ b/scripts/runTests.ps1
@@ -10,27 +10,12 @@ Function global:registerSingleTests()
 
     Write-Host "Registering tests..."
 
-    $global:TESTSUITE_TIMEOUT = 3900
+    $env:TIMELIMIT = 3900
 
     Write-Host "Using test definitions from repo..."
-    Try
-    {
-        $out = python "$env:WORKSPACE\jenkins\helper\generate_jenkins_scripts.py" "$INNERWORKDIR\ArangoDB\tests\test-definitions.txt" -f ps1
-        If ($LASTEXITCODE -eq 0)
-        {
-            echo $out | Invoke-Expression -ErrorAction Stop
-        }
-        Else
-        {
-            throw "$out"
-        }
-        Set-Variable -Name "ok" -Value $true -Scope global
-    }
-    Catch
-    {
-        Write-Host "Error: $_"
-        Set-Variable -Name "ok" -Value $false -Scope global
-    }
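+    # py7zr is optional for the launch controller: with it, reports are packed as 7z instead of tar.bz2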
+    pip install py7zr
+    proc -process "python.exe" -argument "$env:WORKSPACE\jenkins\helper\test_launch_controller.py $INNERWORKDIR\ArangoDB\tests\test-definitions.txt" -logfile $false -priority "Normal"
 }
 
 Function global:registerClusterTests()
@@ -38,27 +23,11 @@ Function global:registerClusterTests()
     noteStartAndRepoState
     Write-Host "Registering tests..."
 
-    $global:TESTSUITE_TIMEOUT = 6000
+    $env:TIMELIMIT = 6600
 
     Write-Host "Using test definitions from repo..."
-    Try
-    {
-        $out = python "$env:WORKSPACE\jenkins\helper\generate_jenkins_scripts.py" "$INNERWORKDIR\ArangoDB\tests\test-definitions.txt" -f ps1 --cluster
-        If ($LASTEXITCODE -eq 0)
-        {
-            echo $out | Invoke-Expression -ErrorAction Stop
-        }
-        Else
-        {
-            throw "$out"
-        }
-        Set-Variable -Name "ok" -Value $true -Scope global
-    }
-    Catch
-    {
-        Write-Host "Error: $_"
-        Set-Variable -Name "ok" -Value $false -Scope global
-    }
+    pip install py7zr
+    proc -process "python.exe" -argument "$env:WORKSPACE\jenkins\helper\test_launch_controller.py $INNERWORKDIR\ArangoDB\tests\test-definitions.txt --cluster" -logfile $false -priority "Normal"
 }
 
 runTests