From d604820de9e624f9fd08e5bb372da39945bf1d79 Mon Sep 17 00:00:00 2001 From: Stefano Franz Date: Tue, 26 Nov 2019 13:05:02 +0000 Subject: [PATCH 1/3] put podlogs into folder with taskName to support parallel jenkins unit and integration test run (#5759) --- buildSrc/src/main/groovy/net/corda/testing/KubesTest.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/buildSrc/src/main/groovy/net/corda/testing/KubesTest.java b/buildSrc/src/main/groovy/net/corda/testing/KubesTest.java index cc5775aaae..7bf5b05d59 100644 --- a/buildSrc/src/main/groovy/net/corda/testing/KubesTest.java +++ b/buildSrc/src/main/groovy/net/corda/testing/KubesTest.java @@ -11,7 +11,6 @@ import io.fabric8.kubernetes.api.model.Quantity; import io.fabric8.kubernetes.api.model.Status; import io.fabric8.kubernetes.api.model.StatusCause; import io.fabric8.kubernetes.api.model.StatusDetails; -import io.fabric8.kubernetes.api.model.Toleration; import io.fabric8.kubernetes.api.model.TolerationBuilder; import io.fabric8.kubernetes.client.DefaultKubernetesClient; import io.fabric8.kubernetes.client.KubernetesClient; @@ -424,7 +423,9 @@ public class KubesTest extends DefaultTask { private File startLogPumping(InputStream stdOutIs, int podIdx, File podLogsDirectory, boolean printOutput) throws IOException { - File outputFile = new File(podLogsDirectory, "container-" + podIdx + ".log"); + File outputDir = new File(podLogsDirectory, taskToExecuteName); + outputDir.mkdirs(); + File outputFile = new File(outputDir, "container-" + podIdx + ".log"); outputFile.createNewFile(); Thread loggingThread = new Thread(() -> { try (BufferedWriter out = new BufferedWriter(new FileWriter(outputFile, true)); From d33dbb2ea9c5cfb7a7ef00734eec79757b922e4d Mon Sep 17 00:00:00 2001 From: Razvan Codreanu <52859362+Schife@users.noreply.github.com> Date: Thu, 28 Nov 2019 14:49:39 +0000 Subject: [PATCH 2/3] TM-104 Switching the distributed testing plugin to the one released in artifactory (#5764) * TM-104 switch to using the published plugin * TM-104 switching to artifactory plugin * TM-104 remove unused plugin * TM-104 adding docker plugin * TM-104 adding docker plugin take 2 * add dependencies-dev and set distributed build plugin to changing --- build.gradle | 19 +- buildSrc/build.gradle | 38 -- buildSrc/settings.gradle | 3 - .../groovy/net/corda/testing/Artifactory.java | 147 ----- .../net/corda/testing/BucketingAllocator.java | 209 ------- .../corda/testing/BucketingAllocatorTask.java | 32 - .../net/corda/testing/DistributeTestsBy.java | 5 - .../corda/testing/DistributedTesting.groovy | 310 --------- .../net/corda/testing/ImageBuilding.java | 161 ----- .../groovy/net/corda/testing/KubesTest.java | 589 ------------------ .../testing/ListShufflerAndAllocator.java | 37 -- .../groovy/net/corda/testing/ListTests.java | 89 --- .../net/corda/testing/ParallelTestGroup.java | 115 ---- .../net/corda/testing/PodAllocator.java | 147 ----- .../groovy/net/corda/testing/PodLogLevel.java | 5 - .../groovy/net/corda/testing/Properties.java | 91 --- .../corda/testing/TestDurationArtifacts.java | 430 ------------- .../main/groovy/net/corda/testing/Tests.java | 213 ------- .../java/net/corda/testing/KubePodResult.java | 35 -- .../net/corda/testing/KubesReporting.java | 194 ------ .../java/net/corda/testing/retry/Retry.java | 48 -- .../net/corda/testing/ListTestsTest.java | 43 -- .../net/corda/testing/PropertiesTest.java | 63 -- .../testing/TestDurationArtifactsTest.java | 323 ---------- .../groovy/net/corda/testing/TestsTest.java | 145 ----- 
.../corda/testing/BucketingAllocatorTest.java | 178 ------ 26 files changed, 11 insertions(+), 3658 deletions(-) delete mode 100644 buildSrc/build.gradle delete mode 100644 buildSrc/settings.gradle delete mode 100644 buildSrc/src/main/groovy/net/corda/testing/Artifactory.java delete mode 100644 buildSrc/src/main/groovy/net/corda/testing/BucketingAllocator.java delete mode 100644 buildSrc/src/main/groovy/net/corda/testing/BucketingAllocatorTask.java delete mode 100644 buildSrc/src/main/groovy/net/corda/testing/DistributeTestsBy.java delete mode 100644 buildSrc/src/main/groovy/net/corda/testing/DistributedTesting.groovy delete mode 100644 buildSrc/src/main/groovy/net/corda/testing/ImageBuilding.java delete mode 100644 buildSrc/src/main/groovy/net/corda/testing/KubesTest.java delete mode 100644 buildSrc/src/main/groovy/net/corda/testing/ListShufflerAndAllocator.java delete mode 100644 buildSrc/src/main/groovy/net/corda/testing/ListTests.java delete mode 100644 buildSrc/src/main/groovy/net/corda/testing/ParallelTestGroup.java delete mode 100644 buildSrc/src/main/groovy/net/corda/testing/PodAllocator.java delete mode 100644 buildSrc/src/main/groovy/net/corda/testing/PodLogLevel.java delete mode 100644 buildSrc/src/main/groovy/net/corda/testing/Properties.java delete mode 100644 buildSrc/src/main/groovy/net/corda/testing/TestDurationArtifacts.java delete mode 100644 buildSrc/src/main/groovy/net/corda/testing/Tests.java delete mode 100644 buildSrc/src/main/java/net/corda/testing/KubePodResult.java delete mode 100644 buildSrc/src/main/java/net/corda/testing/KubesReporting.java delete mode 100644 buildSrc/src/main/java/net/corda/testing/retry/Retry.java delete mode 100644 buildSrc/src/test/groovy/net/corda/testing/ListTestsTest.java delete mode 100644 buildSrc/src/test/groovy/net/corda/testing/PropertiesTest.java delete mode 100644 buildSrc/src/test/groovy/net/corda/testing/TestDurationArtifactsTest.java delete mode 100644 buildSrc/src/test/groovy/net/corda/testing/TestsTest.java delete mode 100644 buildSrc/src/test/java/net/corda/testing/BucketingAllocatorTest.java diff --git a/build.gradle b/build.gradle index d77bd17df3..5f1ae94300 100644 --- a/build.gradle +++ b/build.gradle @@ -1,8 +1,6 @@ -import net.corda.testing.DistributeTestsBy -import net.corda.testing.DistributedTesting -import net.corda.testing.ImageBuilding -import net.corda.testing.ParallelTestGroup -import net.corda.testing.PodLogLevel +import com.r3.testing.DistributeTestsBy +import com.r3.testing.ParallelTestGroup +import com.r3.testing.PodLogLevel import static org.gradle.api.JavaVersion.VERSION_11 import static org.gradle.api.JavaVersion.VERSION_1_8 @@ -152,6 +150,9 @@ buildscript { maven { url 'https://kotlin.bintray.com/kotlinx' } + maven { + url "https://ci-artifactory.corda.r3cev.com/artifactory/corda-dependencies-dev" + } maven { url "$artifactory_contextUrl/corda-releases" } @@ -176,6 +177,8 @@ buildscript { // Capsule gradle plugin forked and maintained locally to support Gradle 5.x // See https://github.com/corda/gradle-capsule-plugin classpath "us.kirchmeier:gradle-capsule-plugin:1.0.4_r3" + classpath group: "com.r3.testing", name: "gradle-distributed-testing-plugin", version: "1.2-SNAPSHOT", changing: true + classpath "com.bmuschko:gradle-docker-plugin:5.0.0" } } @@ -183,7 +186,6 @@ plugins { // Add the shadow plugin to the plugins classpath for the entire project. 
id 'com.github.johnrengelman.shadow' version '2.0.4' apply false id "com.gradle.build-scan" version "2.2.1" - id 'com.bmuschko.docker-remote-api' } ext { @@ -194,6 +196,7 @@ apply plugin: 'com.github.ben-manes.versions' apply plugin: 'net.corda.plugins.publish-utils' apply plugin: 'maven-publish' apply plugin: 'com.jfrog.artifactory' +apply plugin: "com.bmuschko.docker-remote-api" // We need the following three lines even though they're inside an allprojects {} block below because otherwise // IntelliJ gets confused when importing the project and ends up erasing and recreating the .idea directory, along @@ -638,5 +641,5 @@ task allParallelSmokeTest(type: ParallelTestGroup) { memoryInGbPerFork 10 distribute DistributeTestsBy.CLASS } -apply plugin: ImageBuilding -apply plugin: DistributedTesting +apply plugin: 'com.r3.testing.distributed-testing' +apply plugin: 'com.r3.testing.image-building' diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle deleted file mode 100644 index 0e2e78274d..0000000000 --- a/buildSrc/build.gradle +++ /dev/null @@ -1,38 +0,0 @@ -buildscript { - Properties constants = new Properties() - file("../constants.properties").withInputStream { constants.load(it) } - - ext { - guava_version = constants.getProperty("guavaVersion") - class_graph_version = constants.getProperty('classgraphVersion') - assertj_version = '3.9.1' - junit_version = '4.12' - } -} - -repositories { - mavenLocal() - mavenCentral() - jcenter() -} - -allprojects { - tasks.withType(Test) { - // Prevent the project from creating temporary files outside of the build directory. - systemProperty 'java.io.tmpdir', buildDir.absolutePath - } -} - -dependencies { - compile gradleApi() - compile "io.fabric8:kubernetes-client:4.4.1" - compile 'org.apache.commons:commons-compress:1.19' - compile 'org.apache.commons:commons-lang3:3.9' - compile 'commons-codec:commons-codec:1.13' - compile "io.github.classgraph:classgraph:$class_graph_version" - compile "com.bmuschko:gradle-docker-plugin:5.0.0" - compile 'org.apache.commons:commons-csv:1.1' - compile group: 'org.jetbrains', name: 'annotations', version: '13.0' - testCompile "junit:junit:$junit_version" - testCompile group: 'org.hamcrest', name: 'hamcrest-all', version: '1.3' -} diff --git a/buildSrc/settings.gradle b/buildSrc/settings.gradle deleted file mode 100644 index fd0f847cd4..0000000000 --- a/buildSrc/settings.gradle +++ /dev/null @@ -1,3 +0,0 @@ -rootProject.name = 'buildSrc' - -apply from: '../buildCacheSettings.gradle' diff --git a/buildSrc/src/main/groovy/net/corda/testing/Artifactory.java b/buildSrc/src/main/groovy/net/corda/testing/Artifactory.java deleted file mode 100644 index 63e44aea55..0000000000 --- a/buildSrc/src/main/groovy/net/corda/testing/Artifactory.java +++ /dev/null @@ -1,147 +0,0 @@ -package net.corda.testing; - -import okhttp3.*; -import org.apache.commons.compress.utils.IOUtils; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.*; - -/** - * Used by TestArtifacts - */ -public class Artifactory { - - // - private static final Logger LOG = LoggerFactory.getLogger(Artifactory.class); - - private static String authorization() { - return Credentials.basic(Properties.getUsername(), Properties.getPassword()); - } - - /** - * Construct the URL in a style that Artifactory prefers. - * - * @param baseUrl e.g. https://software.r3.com/artifactory/corda-releases/net/corda/corda/ - * @param theTag e.g. 
4.3-RC0 - * @param artifact e.g. corda - * @param extension e.g. jar - * @return full URL to artifact. - */ - private static String getFullUrl(@NotNull final String baseUrl, - @NotNull final String theTag, - @NotNull final String artifact, - @NotNull final String extension) { - return baseUrl + "/" + theTag + "/" + getFileName(artifact, extension, theTag); - } - - /** - * @param artifact e.g. corda - * @param extension e.g. jar - * @param theTag e.g. 4.3 - * @return e.g. corda-4.3.jar - */ - static String getFileName(@NotNull final String artifact, - @NotNull final String extension, - @Nullable final String theTag) { - StringBuilder sb = new StringBuilder().append(artifact); - if (theTag != null) { - sb.append("-").append(theTag); - } - sb.append(".").append(extension); - return sb.toString(); - } - // - - /** - * Get the unit tests, synchronous get. - * See https://www.jfrog.com/confluence/display/RTF/Artifactory+REST+API#ArtifactoryRESTAPI-RetrieveLatestArtifact - * - * @return true if successful, false otherwise. - */ - boolean get(@NotNull final String baseUrl, - @NotNull final String theTag, - @NotNull final String artifact, - @NotNull final String extension, - @NotNull final OutputStream outputStream) { - final String url = getFullUrl(baseUrl, theTag, artifact, extension); - final Request request = new Request.Builder() - .addHeader("Authorization", authorization()) - .url(url) - .build(); - - final OkHttpClient client = new OkHttpClient(); - - try (Response response = client.newCall(request).execute()) { - handleResponse(response); - if (response.body() != null) { - outputStream.write(response.body().bytes()); - } else { - LOG.warn("Response body was empty"); - } - } catch (IOException e) { - LOG.warn("Unable to execute GET via REST"); - LOG.debug("Exception", e); - return false; - } - - LOG.warn("Ok. 
REST GET successful"); - - return true; - } - - /** - * Post an artifact, synchronous PUT - * See https://www.jfrog.com/confluence/display/RTF/Artifactory+REST+API#ArtifactoryRESTAPI-DeployArtifact - * - * @return true if successful - */ - boolean put(@NotNull final String baseUrl, - @NotNull final String theTag, - @NotNull final String artifact, - @NotNull final String extension, - @NotNull final InputStream inputStream) { - final MediaType contentType = MediaType.parse("application/zip, application/octet-stream"); - final String url = getFullUrl(baseUrl, theTag, artifact, extension); - - final OkHttpClient client = new OkHttpClient(); - - byte[] bytes; - - try { - bytes = IOUtils.toByteArray(inputStream); - } catch (IOException e) { - LOG.warn("Unable to execute PUT tests via REST: ", e); - return false; - } - - final Request request = new Request.Builder() - .addHeader("Authorization", authorization()) - .url(url) - .put(RequestBody.create(contentType, bytes)) - .build(); - - try (Response response = client.newCall(request).execute()) { - handleResponse(response); - } catch (IOException e) { - LOG.warn("Unable to execute PUT via REST: ", e); - return false; - } - - return true; - } - - private void handleResponse(@NotNull final Response response) throws IOException { - if (response.isSuccessful()) return; - - LOG.warn("Bad response from server: {}", response.toString()); - LOG.warn(response.toString()); - if (response.code() == 401) { - throw new IOException("Not authorized - incorrect credentials?"); - } - - throw new IOException(response.message()); - } -} diff --git a/buildSrc/src/main/groovy/net/corda/testing/BucketingAllocator.java b/buildSrc/src/main/groovy/net/corda/testing/BucketingAllocator.java deleted file mode 100644 index 14c2ea1c07..0000000000 --- a/buildSrc/src/main/groovy/net/corda/testing/BucketingAllocator.java +++ /dev/null @@ -1,209 +0,0 @@ -package net.corda.testing; - -//Why Java?! because sometimes types are useful. - -import groovy.lang.Tuple2; -import org.gradle.api.tasks.TaskAction; -import org.jetbrains.annotations.NotNull; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.TimeUnit; -import java.util.function.Supplier; -import java.util.stream.Collectors; -import java.util.stream.IntStream; - -import static net.corda.testing.ListTests.DISTRIBUTION_PROPERTY; - -public class BucketingAllocator { - private static final Logger LOG = LoggerFactory.getLogger(BucketingAllocator.class); - private final List<TestsForForkContainer> forkContainers; - private final Supplier<Tests> timedTestsProvider; - private List<Tuple2<TestLister, Object>> sources = new ArrayList<>(); - - private DistributeTestsBy distribution = System.getProperty(DISTRIBUTION_PROPERTY) != null && !System.getProperty(DISTRIBUTION_PROPERTY).isEmpty() ?
- DistributeTestsBy.valueOf(System.getProperty(DISTRIBUTION_PROPERTY)) : DistributeTestsBy.METHOD; - - - public BucketingAllocator(Integer forkCount, Supplier<Tests> timedTestsProvider) { - this.forkContainers = IntStream.range(0, forkCount).mapToObj(TestsForForkContainer::new).collect(Collectors.toList()); - this.timedTestsProvider = timedTestsProvider; - } - - public void addSource(TestLister source, Object testTask) { - sources.add(new Tuple2<>(source, testTask)); - } - - public List<String> getTestsForForkAndTestTask(Integer fork, Object testTask) { - return forkContainers.get(fork).getTestsForTask(testTask); - } - - @TaskAction - public void generateTestPlan() { - Tests allTestsFromFile = timedTestsProvider.get(); - List<Tuple2<String, Object>> allDiscoveredTests = getTestsOnClasspathOfTestingTasks(); - List<TestBucket> matchedTests = matchClasspathTestsToFile(allTestsFromFile, allDiscoveredTests); - - //use greedy algo - for each testbucket find the currently smallest container and add to it - allocateTestsToForks(matchedTests); - forkContainers.forEach(TestsForForkContainer::freeze); - - printSummary(); - } - - static String getDuration(long nanos) { - long t = TimeUnit.NANOSECONDS.toMinutes(nanos); - if (t > 0) { - return t + " mins"; - } - t = TimeUnit.NANOSECONDS.toSeconds(nanos); - if (t > 0) { - return t + " secs"; - } - t = TimeUnit.NANOSECONDS.toMillis(nanos); - if (t > 0) { - return t + " ms"; - } - return nanos + " ns"; - } - - private void printSummary() { - forkContainers.forEach(container -> { - System.out.println("####### TEST PLAN SUMMARY ( " + container.forkIdx + " ) #######"); - System.out.println("Duration: " + getDuration(container.getCurrentDuration())); - System.out.println("Number of tests: " + container.testsForFork.stream().mapToInt(b -> b.foundTests.size()).sum()); - System.out.println("Tests to Run: "); - container.testsForFork.forEach(tb -> { - System.out.println(tb.testName); - tb.foundTests.forEach(ft -> System.out.println("\t" + ft.getFirst() + ", " + getDuration(ft.getSecond()))); - }); - }); - } - - private void allocateTestsToForks(@NotNull List<TestBucket> matchedTests) { - matchedTests.forEach(matchedTestBucket -> { - TestsForForkContainer smallestContainer = Collections.min(forkContainers, Comparator.comparing(TestsForForkContainer::getCurrentDuration)); - smallestContainer.addBucket(matchedTestBucket); - }); - } - - List<TestsForForkContainer> getForkContainers() { - return forkContainers; - } - - private List<TestBucket> matchClasspathTestsToFile(@NotNull final Tests tests, - @NotNull final List<Tuple2<String, Object>> allDiscoveredTests) { - // Note that this does not preserve the order of tests with known and unknown durations, as we - // always return a duration from 'tests.startsWith'. - return allDiscoveredTests.stream().map(tuple -> { - final String testName = tuple.getFirst(); - final Object task = tuple.getSecond(); - - // If the gradle task is distributing by class rather than method, then 'testName' will be the className - // and not className.testName - // No matter which it is, we return the mean test duration as the duration value if not found.
- final List<Tuple2<String, Long>> matchingTests; - switch (distribution) { - case METHOD: - matchingTests = tests.equals(testName); - break; - case CLASS: - matchingTests = tests.startsWith(testName); - break; - default: - throw new IllegalArgumentException("Unknown distribution type: " + distribution); - } - - return new TestBucket(task, testName, matchingTests); - }).sorted(Comparator.comparing(TestBucket::getDuration).reversed()).collect(Collectors.toList()); - } - - private List<Tuple2<String, Object>> getTestsOnClasspathOfTestingTasks() { - return sources.stream().map(source -> { - TestLister lister = source.getFirst(); - Object testTask = source.getSecond(); - return lister.getAllTestsDiscovered().stream().map(test -> new Tuple2<>(test, testTask)).collect(Collectors.toList()); - }).flatMap(Collection::stream).sorted(Comparator.comparing(Tuple2::getFirst)).collect(Collectors.toList()); - } - - public static class TestBucket { - final Object testTask; - final String testName; - final List<Tuple2<String, Long>> foundTests; - final long durationNanos; - - public TestBucket(@NotNull final Object testTask, - @NotNull final String testName, - @NotNull final List<Tuple2<String, Long>> foundTests) { - this.testTask = testTask; - this.testName = testName; - this.foundTests = foundTests; - this.durationNanos = foundTests.stream().mapToLong(tp -> Math.max(tp.getSecond(), 1)).sum(); - } - - public long getDuration() { - return durationNanos; - } - - @Override - public String toString() { - return "TestBucket{" + - "testTask=" + testTask + - ", nameWithAsterix='" + testName + '\'' + - ", foundTests=" + foundTests + - ", durationNanos=" + durationNanos + - '}'; - } - } - - public static class TestsForForkContainer { - private final Integer forkIdx; - private final List<TestBucket> testsForFork = Collections.synchronizedList(new ArrayList<>()); - private final Map<Object, List<TestBucket>> frozenTests = new HashMap<>(); - private long runningDuration = 0L; - - public TestsForForkContainer(Integer forkIdx) { - this.forkIdx = forkIdx; - } - - public void addBucket(TestBucket tb) { - this.testsForFork.add(tb); - this.runningDuration = this.runningDuration + tb.durationNanos; - } - - public Long getCurrentDuration() { - return runningDuration; - } - - public void freeze() { - testsForFork.forEach(tb -> { - frozenTests.computeIfAbsent(tb.testTask, i -> new ArrayList<>()).add(tb); - }); - } - - public List<String> getTestsForTask(Object task) { - return frozenTests.getOrDefault(task, Collections.emptyList()).stream().map(it -> it.testName).collect(Collectors.toList()); - } - - public List<TestBucket> getBucketsForFork() { - return new ArrayList<>(testsForFork); - } - - @Override - public String toString() { - return "TestsForForkContainer{" + - "runningDuration=" + runningDuration + - ", forkIdx=" + forkIdx + - ", testsForFork=" + testsForFork + - ", frozenTests=" + frozenTests + - '}'; - } - } -} diff --git a/buildSrc/src/main/groovy/net/corda/testing/BucketingAllocatorTask.java b/buildSrc/src/main/groovy/net/corda/testing/BucketingAllocatorTask.java deleted file mode 100644 index 06a0d43141..0000000000 --- a/buildSrc/src/main/groovy/net/corda/testing/BucketingAllocatorTask.java +++ /dev/null @@ -1,32 +0,0 @@ -package net.corda.testing; - -import org.gradle.api.DefaultTask; -import org.gradle.api.tasks.TaskAction; -import org.gradle.api.tasks.testing.Test; - -import javax.inject.Inject; -import java.util.List; -import java.util.stream.Collectors; - -public class BucketingAllocatorTask extends DefaultTask { - private final BucketingAllocator allocator; - - @Inject - public BucketingAllocatorTask(Integer forkCount) { - this.allocator = new
BucketingAllocator(forkCount, TestDurationArtifacts.getTestsSupplier()); - } - - public void addSource(TestLister source, Test testTask) { - allocator.addSource(source, testTask); - this.dependsOn(source); - } - - public List<String> getTestIncludesForForkAndTestTask(Integer fork, Test testTask) { - return allocator.getTestsForForkAndTestTask(fork, testTask).stream().map(t -> t + "*").collect(Collectors.toList()); - } - - @TaskAction - public void allocate() { - allocator.generateTestPlan(); - } -} diff --git a/buildSrc/src/main/groovy/net/corda/testing/DistributeTestsBy.java b/buildSrc/src/main/groovy/net/corda/testing/DistributeTestsBy.java deleted file mode 100644 index 4c53b07aad..0000000000 --- a/buildSrc/src/main/groovy/net/corda/testing/DistributeTestsBy.java +++ /dev/null @@ -1,5 +0,0 @@ -package net.corda.testing; - -public enum DistributeTestsBy { - CLASS, METHOD -} diff --git a/buildSrc/src/main/groovy/net/corda/testing/DistributedTesting.groovy b/buildSrc/src/main/groovy/net/corda/testing/DistributedTesting.groovy deleted file mode 100644 index c0335e4bfc..0000000000 --- a/buildSrc/src/main/groovy/net/corda/testing/DistributedTesting.groovy +++ /dev/null @@ -1,310 +0,0 @@ -package net.corda.testing - -import com.bmuschko.gradle.docker.tasks.image.DockerBuildImage -import com.bmuschko.gradle.docker.tasks.image.DockerPushImage -import org.gradle.api.GradleException -import org.gradle.api.Plugin -import org.gradle.api.Project -import org.gradle.api.Task -import org.gradle.api.tasks.testing.Test -import org.gradle.api.tasks.testing.TestResult -import org.gradle.internal.impldep.junit.framework.TestFailure - -import java.util.stream.Collectors - -/** - This plugin is responsible for wiring together the various components of test task modification - */ -class DistributedTesting implements Plugin<Project> { - - public static final String GRADLE_GROUP = "Distributed Testing"; - - static def getPropertyAsInt(Project proj, String property, Integer defaultValue) { - return proj.hasProperty(property) ? Integer.parseInt(proj.property(property).toString()) : defaultValue - } - - @Override - void apply(Project project) { - if (System.getProperty("kubenetize") != null) { - Properties.setRootProjectType(project.rootProject.name) - - Integer forks = getPropertyAsInt(project, "dockerForks", 1) - - ensureImagePluginIsApplied(project) - ImageBuilding imagePlugin = project.plugins.getPlugin(ImageBuilding) - DockerPushImage imagePushTask = imagePlugin.pushTask - DockerBuildImage imageBuildTask = imagePlugin.buildTask - String tagToUseForRunningTests = System.getProperty(ImageBuilding.PROVIDE_TAG_FOR_RUNNING_PROPERTY) - String tagToUseForBuilding = System.getProperty(ImageBuilding.PROVIDE_TAG_FOR_RUNNING_PROPERTY) - BucketingAllocatorTask globalAllocator = project.tasks.create("bucketingAllocator", BucketingAllocatorTask, forks) - globalAllocator.group = GRADLE_GROUP - globalAllocator.description = "Allocates tests to buckets" - - - Set<String> requestedTaskNames = project.gradle.startParameter.taskNames.toSet() - def requestedTasks = requestedTaskNames.collect { project.tasks.findByPath(it) } - - //in each subproject - //1. add the task to determine all tests within the module and register this as a source to the global allocator - //2. modify the underlying testing task to use the output of the global allocator to include a subset of tests for each fork - //3. KubesTest will invoke these test tasks in a parallel fashion on a remote k8s cluster - //4.
after each completed test write its name to a file to keep track of what finished for restart purposes - project.subprojects { Project subProject -> - subProject.tasks.withType(Test) { Test task -> - project.logger.info("Evaluating ${task.getPath()}") - if (task in requestedTasks && !task.hasProperty("ignoreForDistribution")) { - project.logger.info "Modifying ${task.getPath()}" - Task testListerTask = createTestListingTasks(task, subProject) - globalAllocator.addSource(testListerTask, task) - Test modifiedTestTask = modifyTestTaskForParallelExecution(subProject, task, globalAllocator) - } else { - project.logger.info "Skipping modification of ${task.getPath()} as it's not scheduled for execution" - } - if (!task.hasProperty("ignoreForDistribution")) { - //this is what enables execution of a single test suite - for example node:parallelTest would execute all unit tests in node, node:parallelIntegrationTest would do the same for integration tests - KubesTest parallelTestTask = generateParallelTestingTask(subProject, task, imagePushTask, tagToUseForRunningTests) - } - } - } - - //now we are going to create "super" groupings of the Test tasks, so that it is possible to invoke all submodule tests with a single command - //group all test Tasks by their underlying target task (test/integrationTest/smokeTest ... etc) - Map<String, List<Test>> allTestTasksGroupedByType = project.subprojects.collect { prj -> prj.getAllTasks(false).values() } - .flatten() - .findAll { task -> task instanceof Test } - .groupBy { Test task -> task.name } - - //first step is to create a single task which will invoke all the submodule tasks for each grouping - //ie allParallelTest will invoke [node:test, core:test, client:rpc:test ... etc] - //ie allIntegrationTest will invoke [node:integrationTest, core:integrationTest, client:rpc:integrationTest ... etc] - //ie allUnitAndIntegrationTest will invoke [node:integrationTest, node:test, core:integrationTest, core:test, client:rpc:test , client:rpc:integrationTest ... etc] - Set<ParallelTestGroup> userGroups = new HashSet<>(project.tasks.withType(ParallelTestGroup)) - - userGroups.forEach { testGrouping -> - - //for each "group" (ie: test, integrationTest) within the grouping find all the Test tasks which have the same name. - List<Test> testTasksToRunInGroup = ((ParallelTestGroup) testGrouping).getGroups().collect { - allTestTasksGroupedByType.get(it) - }.flatten() - - //join up these test tasks into a single set of tasks to invoke (node:test, node:integrationTest...)
- String superListOfTasks = testTasksToRunInGroup.collect { it.path }.join(" ") - - //generate a preAllocate / deAllocate task which allows you to "pre-book" a node during the image building phase - //this prevents time lost to cloud provider node spin up time (assuming image build time > provider spin up time) - def (Task preAllocateTask, Task deAllocateTask) = generatePreAllocateAndDeAllocateTasksForGrouping(project, testGrouping) - - //modify the image building task to depend on the preAllocate task (if specified on the command line) - this prevents gradle running out of order - if (preAllocateTask.name in requestedTaskNames) { - imageBuildTask.dependsOn preAllocateTask - imagePushTask.finalizedBy(deAllocateTask) - } - - def userDefinedParallelTask = project.rootProject.tasks.create("userDefined" + testGrouping.getName().capitalize(), KubesTest) { - group = GRADLE_GROUP - - if (!tagToUseForRunningTests) { - dependsOn imagePushTask - } - - if (deAllocateTask.name in requestedTaskNames) { - dependsOn deAllocateTask - } - numberOfPods = testGrouping.getShardCount() - printOutput = testGrouping.getPrintToStdOut() - fullTaskToExecutePath = superListOfTasks - taskToExecuteName = testGrouping.getGroups().join("And") - memoryGbPerFork = testGrouping.getGbOfMemory() - numberOfCoresPerFork = testGrouping.getCoresToUse() - distribution = testGrouping.getDistribution() - podLogLevel = testGrouping.getLogLevel() - taints = testGrouping.getNodeTaints() - sidecarImage = testGrouping.sidecarImage - additionalArgs = testGrouping.additionalArgs - doFirst { - dockerTag = tagToUseForRunningTests ? (ImageBuilding.registryName + ":" + tagToUseForRunningTests) : (imagePushTask.imageName.get() + ":" + imagePushTask.tag.get()) - } - } - def reportOnAllTask = project.rootProject.tasks.create("userDefinedReports${testGrouping.getName().capitalize()}", KubesReporting) { - group = GRADLE_GROUP - dependsOn userDefinedParallelTask - destinationDir new File(project.rootProject.getBuildDir(), "userDefinedReports${testGrouping.getName().capitalize()}") - doFirst { - destinationDir.deleteDir() - shouldPrintOutput = !testGrouping.getPrintToStdOut() - podResults = userDefinedParallelTask.containerResults - reportOn(userDefinedParallelTask.testOutput) - } - } - - // Task to zip up test results, and upload them to somewhere (Artifactory). - def zipTask = TestDurationArtifacts.createZipTask(project.rootProject, testGrouping.name, userDefinedParallelTask) - - userDefinedParallelTask.finalizedBy(reportOnAllTask) - zipTask.dependsOn(userDefinedParallelTask) - testGrouping.dependsOn(zipTask) - } - } - - // Added only so that we can manually run zipTask on the command line as a test. 
- TestDurationArtifacts.createZipTask(project.rootProject, "zipTask", null) - .setDescription("Zip task that can be run locally for testing"); - } - - private List<Task> generatePreAllocateAndDeAllocateTasksForGrouping(Project project, ParallelTestGroup testGrouping) { - PodAllocator allocator = new PodAllocator(project.getLogger()) - Task preAllocateTask = project.rootProject.tasks.create("preAllocateFor" + testGrouping.getName().capitalize()) { - group = GRADLE_GROUP - doFirst { - String dockerTag = System.getProperty(ImageBuilding.PROVIDE_TAG_FOR_BUILDING_PROPERTY) - if (dockerTag == null) { - throw new GradleException("pre allocation cannot be used without a stable docker tag - please provide one using -D" + ImageBuilding.PROVIDE_TAG_FOR_BUILDING_PROPERTY) - } - int seed = (dockerTag.hashCode() + testGrouping.getName().hashCode()) - String podPrefix = new BigInteger(64, new Random(seed)).toString(36) - //here we will pre-request the correct number of pods for this testGroup - int numberOfPodsToRequest = testGrouping.getShardCount() - int coresPerPod = testGrouping.getCoresToUse() - int memoryGBPerPod = testGrouping.getGbOfMemory() - allocator.allocatePods(numberOfPodsToRequest, coresPerPod, memoryGBPerPod, podPrefix, testGrouping.getNodeTaints()) - } - } - - Task deAllocateTask = project.rootProject.tasks.create("deAllocateFor" + testGrouping.getName().capitalize()) { - group = GRADLE_GROUP - doFirst { - String dockerTag = System.getProperty(ImageBuilding.PROVIDE_TAG_FOR_RUNNING_PROPERTY) ?: - System.getProperty(ImageBuilding.PROVIDE_TAG_FOR_BUILDING_PROPERTY) - if (dockerTag == null) { - throw new GradleException("pre allocation cannot be used without a stable docker tag - please provide one using -D" + ImageBuilding.PROVIDE_TAG_FOR_RUNNING_PROPERTY) - } - int seed = (dockerTag.hashCode() + testGrouping.getName().hashCode()) - String podPrefix = new BigInteger(64, new Random(seed)).toString(36); - allocator.tearDownPods(podPrefix) - } - } - return [preAllocateTask, deAllocateTask] - } - - private KubesTest generateParallelTestingTask(Project projectContainingTask, Test task, DockerPushImage imageBuildingTask, String providedTag) { - def taskName = task.getName() - def capitalizedTaskName = task.getName().capitalize() - - KubesTest createdParallelTestTask = projectContainingTask.tasks.create("parallel" + capitalizedTaskName, KubesTest) { - group = GRADLE_GROUP + " Parallel Test Tasks" - if (!providedTag) { - dependsOn imageBuildingTask - } - printOutput = true - fullTaskToExecutePath = task.getPath() - taskToExecuteName = taskName - doFirst { - dockerTag = providedTag ?
ImageBuilding.registryName + ":" + providedTag : (imageBuildingTask.imageName.get() + ":" + imageBuildingTask.tag.get()) - } - } - projectContainingTask.logger.info "Created task: ${createdParallelTestTask.getPath()} to enable testing on kubenetes for task: ${task.getPath()}" - return createdParallelTestTask as KubesTest - } - - private Test modifyTestTaskForParallelExecution(Project subProject, Test task, BucketingAllocatorTask globalAllocator) { - subProject.logger.info("modifying task: ${task.getPath()} to depend on task ${globalAllocator.getPath()}") - def reportsDir = new File(new File(KubesTest.TEST_RUN_DIR, "test-reports"), subProject.name + "-" + task.name) - reportsDir.mkdirs() - File executedTestsFile = new File(KubesTest.TEST_RUN_DIR + "/executedTests.txt") - task.configure { - dependsOn globalAllocator - binResultsDir new File(reportsDir, "binary") - reports.junitXml.destination new File(reportsDir, "xml") - maxHeapSize = "10g" - - doFirst { - executedTestsFile.createNewFile() - filter { - List<String> executedTests = executedTestsFile.readLines() - //adding wildcard to each test so they match the ones in the includes list - executedTests.replaceAll({ test -> test + "*" }) - def fork = getPropertyAsInt(subProject, "dockerFork", 0) - subProject.logger.info("requesting tests to include in testing task ${task.getPath()} (idx: ${fork})") - List<String> includes = globalAllocator.getTestIncludesForForkAndTestTask( - fork, - task) - subProject.logger.info "got ${includes.size()} tests to include into testing task ${task.getPath()}" - subProject.logger.info "INCLUDE: ${includes.toString()} " - subProject.logger.info "got ${executedTests.size()} tests to exclude from testing task ${task.getPath()}" - subProject.logger.debug "EXCLUDE: ${executedTests.toString()} " - if (includes.size() == 0) { - subProject.logger.info "Disabling test execution for testing task ${task.getPath()}" - excludeTestsMatching "*" - } - - List<String> intersection = executedTests.stream() - .filter(includes.&contains) - .collect(Collectors.toList()) - subProject.logger.info "got ${intersection.size()} tests in intersection" - subProject.logger.info "INTERSECTION: ${intersection.toString()} " - includes.removeAll(intersection) - - intersection.forEach { exclude -> - subProject.logger.info "excluding: $exclude for testing task ${task.getPath()}" - excludeTestsMatching exclude - } - includes.forEach { include -> - subProject.logger.info "including: $include for testing task ${task.getPath()}" - includeTestsMatching include - } - failOnNoMatchingTests false - } - } - - afterTest { desc, result -> - if (result.getResultType() == TestResult.ResultType.SUCCESS ) { - executedTestsFile.withWriterAppend { writer -> - writer.writeLine(desc.getClassName() + "." + desc.getName()) - } - } - } - } - - return task - } - - private static void ensureImagePluginIsApplied(Project project) { - project.plugins.apply(ImageBuilding) - } - - private Task createTestListingTasks(Test task, Project subProject) { - def taskName = task.getName() - def capitalizedTaskName = task.getName().capitalize() - //determine all the tests which are present in this test task.
- //this list will then be shared between the various worker forks - ListTests createdListTask = subProject.tasks.create("listTestsFor" + capitalizedTaskName, ListTests) { - group = GRADLE_GROUP - //the convention is that a testing task is backed by a sourceSet with the same name - dependsOn subProject.getTasks().getByName("${taskName}Classes") - doFirst { - //we want to set the test scanning classpath to only the output of the sourceSet - this prevents dependencies polluting the list - scanClassPath = task.getTestClassesDirs() ? task.getTestClassesDirs() : Collections.emptyList() - } - } - - //convenience task to utilize the output of the test listing task to display to local console, useful for debugging missing tests - def createdPrintTask = subProject.tasks.create("printTestsFor" + capitalizedTaskName) { - group = GRADLE_GROUP - dependsOn createdListTask - doLast { - createdListTask.getTestsForFork( - getPropertyAsInt(subProject, "dockerFork", 0), - getPropertyAsInt(subProject, "dockerForks", 1), - 42).forEach { testName -> - println testName - } - } - } - - subProject.logger.info("created task: " + createdListTask.getPath() + " in project: " + subProject + " it dependsOn: " + createdListTask.dependsOn) - subProject.logger.info("created task: " + createdPrintTask.getPath() + " in project: " + subProject + " it dependsOn: " + createdPrintTask.dependsOn) - - return createdListTask - } - -} diff --git a/buildSrc/src/main/groovy/net/corda/testing/ImageBuilding.java b/buildSrc/src/main/groovy/net/corda/testing/ImageBuilding.java deleted file mode 100644 index 6d2346e7dc..0000000000 --- a/buildSrc/src/main/groovy/net/corda/testing/ImageBuilding.java +++ /dev/null @@ -1,161 +0,0 @@ -package net.corda.testing; - -import com.bmuschko.gradle.docker.DockerRegistryCredentials; -import com.bmuschko.gradle.docker.tasks.container.DockerCreateContainer; -import com.bmuschko.gradle.docker.tasks.container.DockerLogsContainer; -import com.bmuschko.gradle.docker.tasks.container.DockerRemoveContainer; -import com.bmuschko.gradle.docker.tasks.container.DockerStartContainer; -import com.bmuschko.gradle.docker.tasks.container.DockerWaitContainer; -import com.bmuschko.gradle.docker.tasks.image.DockerBuildImage; -import com.bmuschko.gradle.docker.tasks.image.DockerCommitImage; -import com.bmuschko.gradle.docker.tasks.image.DockerPullImage; -import com.bmuschko.gradle.docker.tasks.image.DockerPushImage; -import com.bmuschko.gradle.docker.tasks.image.DockerRemoveImage; -import com.bmuschko.gradle.docker.tasks.image.DockerTagImage; -import org.gradle.api.GradleException; -import org.gradle.api.Plugin; -import org.gradle.api.Project; -import org.jetbrains.annotations.NotNull; - -import java.io.File; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; - -/** - * this plugin is responsible for setting up all the required docker image building tasks required for producing and pushing an - * image of the current build output to a remote container registry - */ -public class ImageBuilding implements Plugin<Project> { - - public static final String registryName = "stefanotestingcr.azurecr.io/testing"; - public static final String PROVIDE_TAG_FOR_BUILDING_PROPERTY = "docker.build.tag"; - public static final String PROVIDE_TAG_FOR_RUNNING_PROPERTY = "docker.run.tag"; - public DockerPushImage pushTask; - public DockerBuildImage buildTask; - - @Override - public void apply(@NotNull final Project project) { - - final DockerRegistryCredentials registryCredentialsForPush = new
DockerRegistryCredentials(project.getObjects()); - registryCredentialsForPush.getUsername().set("stefanotestingcr"); - registryCredentialsForPush.getPassword().set(System.getProperty("docker.push.password", "")); - - final DockerPullImage pullTask = project.getTasks().create("pullBaseImage", DockerPullImage.class, dockerPullImage -> { - dockerPullImage.doFirst(task -> dockerPullImage.setRegistryCredentials(registryCredentialsForPush)); - dockerPullImage.getRepository().set("stefanotestingcr.azurecr.io/buildbase"); - dockerPullImage.getTag().set("latest"); - }); - - - final DockerBuildImage buildDockerImageForSource = project.getTasks().create("buildDockerImageForSource", DockerBuildImage.class, - dockerBuildImage -> { - dockerBuildImage.dependsOn(Arrays.asList(project.getRootProject().getTasksByName("clean", true), pullTask)); - dockerBuildImage.getInputDir().set(new File(".")); - dockerBuildImage.getDockerFile().set(new File(new File("testing"), "Dockerfile")); - }); - - this.buildTask = buildDockerImageForSource; - - final DockerCreateContainer createBuildContainer = project.getTasks().create("createBuildContainer", DockerCreateContainer.class, - dockerCreateContainer -> { - final File baseWorkingDir = new File(System.getProperty("docker.work.dir") != null && - !System.getProperty("docker.work.dir").isEmpty() ? - System.getProperty("docker.work.dir") : System.getProperty("java.io.tmpdir")); - final File gradleDir = new File(baseWorkingDir, "gradle"); - final File mavenDir = new File(baseWorkingDir, "maven"); - dockerCreateContainer.doFirst(task -> { - if (!gradleDir.exists()) { - gradleDir.mkdirs(); - } - if (!mavenDir.exists()) { - mavenDir.mkdirs(); - } - - project.getLogger().info("Will use: " + gradleDir.getAbsolutePath() + " for caching gradle artifacts"); - }); - dockerCreateContainer.dependsOn(buildDockerImageForSource); - dockerCreateContainer.targetImageId(buildDockerImageForSource.getImageId()); - final Map<String, String> map = new HashMap<>(); - map.put(gradleDir.getAbsolutePath(), "/tmp/gradle"); - map.put(mavenDir.getAbsolutePath(), "/home/root/.m2"); - dockerCreateContainer.getBinds().set(map); - }); - - - final DockerStartContainer startBuildContainer = project.getTasks().create("startBuildContainer", DockerStartContainer.class, - dockerStartContainer -> { - dockerStartContainer.dependsOn(createBuildContainer); - dockerStartContainer.targetContainerId(createBuildContainer.getContainerId()); - }); - - final DockerLogsContainer logBuildContainer = project.getTasks().create("logBuildContainer", DockerLogsContainer.class, - dockerLogsContainer -> { - dockerLogsContainer.dependsOn(startBuildContainer); - dockerLogsContainer.targetContainerId(createBuildContainer.getContainerId()); - dockerLogsContainer.getFollow().set(true); - }); - - final DockerWaitContainer waitForBuildContainer = project.getTasks().create("waitForBuildContainer", DockerWaitContainer.class, - dockerWaitContainer -> { - dockerWaitContainer.dependsOn(logBuildContainer); - dockerWaitContainer.targetContainerId(createBuildContainer.getContainerId()); - dockerWaitContainer.doLast(task -> { - if (dockerWaitContainer.getExitCode() != 0) { - throw new GradleException("Failed to build docker image, aborting build"); - } - }); - }); - - final DockerCommitImage commitBuildImageResult = project.getTasks().create("commitBuildImageResult", DockerCommitImage.class, - dockerCommitImage -> { - dockerCommitImage.dependsOn(waitForBuildContainer); - dockerCommitImage.targetContainerId(createBuildContainer.getContainerId()); - }); - - - final
DockerTagImage tagBuildImageResult = project.getTasks().create("tagBuildImageResult", DockerTagImage.class, dockerTagImage -> { - dockerTagImage.dependsOn(commitBuildImageResult); - dockerTagImage.getImageId().set(commitBuildImageResult.getImageId()); - dockerTagImage.getTag().set(System.getProperty(PROVIDE_TAG_FOR_BUILDING_PROPERTY, UUID.randomUUID().toString().toLowerCase().substring(0, 12))); - dockerTagImage.getRepository().set(registryName); - }); - - - final DockerPushImage pushBuildImage = project.getTasks().create("pushBuildImage", DockerPushImage.class, dockerPushImage -> { - dockerPushImage.dependsOn(tagBuildImageResult); - dockerPushImage.doFirst(task -> dockerPushImage.setRegistryCredentials(registryCredentialsForPush)); - dockerPushImage.getImageName().set(registryName); - dockerPushImage.getTag().set(tagBuildImageResult.getTag()); - }); - - this.pushTask = pushBuildImage; - - - final DockerRemoveContainer deleteContainer = project.getTasks().create("deleteBuildContainer", DockerRemoveContainer.class, - dockerRemoveContainer -> { - dockerRemoveContainer.dependsOn(pushBuildImage); - dockerRemoveContainer.targetContainerId(createBuildContainer.getContainerId()); - }); - - - final DockerRemoveImage deleteTaggedImage = project.getTasks().create("deleteTaggedImage", DockerRemoveImage.class, - dockerRemoveImage -> { - dockerRemoveImage.dependsOn(pushBuildImage); - dockerRemoveImage.getForce().set(true); - dockerRemoveImage.targetImageId(commitBuildImageResult.getImageId()); - }); - - final DockerRemoveImage deleteBuildImage = project.getTasks().create("deleteBuildImage", DockerRemoveImage.class, - dockerRemoveImage -> { - dockerRemoveImage.dependsOn(deleteContainer, deleteTaggedImage); - dockerRemoveImage.getForce().set(true); - dockerRemoveImage.targetImageId(buildDockerImageForSource.getImageId()); - }); - - if (System.getProperty("docker.keep.image") == null) { - pushBuildImage.finalizedBy(deleteContainer, deleteBuildImage, deleteTaggedImage); - } - } -} \ No newline at end of file diff --git a/buildSrc/src/main/groovy/net/corda/testing/KubesTest.java b/buildSrc/src/main/groovy/net/corda/testing/KubesTest.java deleted file mode 100644 index 7bf5b05d59..0000000000 --- a/buildSrc/src/main/groovy/net/corda/testing/KubesTest.java +++ /dev/null @@ -1,589 +0,0 @@ -package net.corda.testing; - -import io.fabric8.kubernetes.api.model.ContainerFluent; -import io.fabric8.kubernetes.api.model.DoneablePod; -import io.fabric8.kubernetes.api.model.PersistentVolumeClaim; -import io.fabric8.kubernetes.api.model.Pod; -import io.fabric8.kubernetes.api.model.PodBuilder; -import io.fabric8.kubernetes.api.model.PodFluent; -import io.fabric8.kubernetes.api.model.PodSpecFluent; -import io.fabric8.kubernetes.api.model.Quantity; -import io.fabric8.kubernetes.api.model.Status; -import io.fabric8.kubernetes.api.model.StatusCause; -import io.fabric8.kubernetes.api.model.StatusDetails; -import io.fabric8.kubernetes.api.model.TolerationBuilder; -import io.fabric8.kubernetes.client.DefaultKubernetesClient; -import io.fabric8.kubernetes.client.KubernetesClient; -import io.fabric8.kubernetes.client.KubernetesClientException; -import io.fabric8.kubernetes.client.Watch; -import io.fabric8.kubernetes.client.Watcher; -import io.fabric8.kubernetes.client.dsl.ExecListener; -import io.fabric8.kubernetes.client.dsl.PodResource; -import io.fabric8.kubernetes.client.utils.Serialization; -import net.corda.testing.retry.Retry; -import okhttp3.Response; -import org.gradle.api.DefaultTask; -import 
org.gradle.api.tasks.TaskAction; -import org.jetbrains.annotations.NotNull; - -import java.io.BufferedReader; -import java.io.BufferedWriter; -import java.io.ByteArrayOutputStream; -import java.io.File; -import java.io.FileWriter; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.PipedInputStream; -import java.io.PipedOutputStream; -import java.io.RandomAccessFile; -import java.math.BigInteger; -import java.nio.channels.FileChannel; -import java.nio.channels.FileLock; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Optional; -import java.util.Queue; -import java.util.Random; -import java.util.Set; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.stream.Collectors; -import java.util.stream.IntStream; - -public class KubesTest extends DefaultTask { - - static final String TEST_RUN_DIR = "/test-runs"; - private static final ExecutorService executorService = Executors.newCachedThreadPool(); - /** - * Name of the k8s Secret object that holds the credentials to access the docker image registry - */ - private static final String REGISTRY_CREDENTIALS_SECRET_NAME = "regcred"; - - private static int DEFAULT_K8S_TIMEOUT_VALUE_MILLIES = 60 * 1_000; - private static int DEFAULT_K8S_WEBSOCKET_TIMEOUT = DEFAULT_K8S_TIMEOUT_VALUE_MILLIES * 30; - private static int DEFAULT_POD_ALLOCATION_TIMEOUT = 60; - - String dockerTag; - String fullTaskToExecutePath; - String taskToExecuteName; - String sidecarImage; - Boolean printOutput = false; - List<String> additionalArgs; - List<String> taints = Collections.emptyList(); - - Integer numberOfCoresPerFork = 4; - Integer memoryGbPerFork = 6; - public volatile List<File> testOutput = Collections.emptyList(); - public volatile List<KubePodResult> containerResults = Collections.emptyList(); - private final Set<String> remainingPods = Collections.synchronizedSet(new HashSet<>()); - - public static String NAMESPACE = "thisisatest"; - - int numberOfPods = 5; - - DistributeTestsBy distribution = DistributeTestsBy.METHOD; - PodLogLevel podLogLevel = PodLogLevel.INFO; - - @TaskAction - public void runDistributedTests() { - String buildId = System.getProperty("buildId", "0"); - String currentUser = System.getProperty("user.name", "UNKNOWN_USER"); - - String stableRunId = rnd64Base36(new Random(buildId.hashCode() + currentUser.hashCode() + taskToExecuteName.hashCode())); - String random = rnd64Base36(new Random()); - - try (KubernetesClient client = getKubernetesClient()) { - client.pods().inNamespace(NAMESPACE).list().getItems().forEach(podToDelete -> { - if (podToDelete.getMetadata().getName().contains(stableRunId)) { - getProject().getLogger().lifecycle("deleting: " + podToDelete.getMetadata().getName()); - client.resource(podToDelete).delete(); - } - }); - } catch (Exception ignored) { - //it's possible that a pod is being deleted by the original build, this can lead to racey conditions - } - - List<CompletableFuture<KubePodResult>> futures = IntStream.range(0, numberOfPods).mapToObj(i -> { - String podName = generatePodName(stableRunId, random, i); - return submitBuild(NAMESPACE, numberOfPods, i, podName, printOutput, 3); -
}).collect(Collectors.toList()); - - this.testOutput = Collections.synchronizedList(futures.stream().map(it -> { - try { - return it.get().getBinaryResults(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - }).flatMap(Collection::stream).collect(Collectors.toList())); - this.containerResults = futures.stream().map(it -> { - try { - return it.get(); - } catch (InterruptedException | ExecutionException e) { - throw new RuntimeException(e); - } - }).collect(Collectors.toList()); - } - - @NotNull - private String generatePodName(String stableRunId, String random, int i) { - int magicMaxLength = 63; - String provisionalName = taskToExecuteName.toLowerCase() + "-" + stableRunId + "-" + random + "-" + i; - //length = 100 - //100-63 = 37 - //subString(37, 100) -? string of 63 characters - return provisionalName.substring(Math.max(provisionalName.length() - magicMaxLength, 0)); - } - - @NotNull - private synchronized KubernetesClient getKubernetesClient() { - - try (RandomAccessFile file = new RandomAccessFile("/tmp/refresh.lock", "rw"); - FileChannel c = file.getChannel(); - FileLock lock = c.lock()) { - - getProject().getLogger().quiet("Invoking kubectl to attempt to refresh token"); - ProcessBuilder tokenRefreshCommand = new ProcessBuilder().command("kubectl", "auth", "can-i", "get", "pods"); - Process refreshProcess = tokenRefreshCommand.start(); - int resultCodeOfRefresh = refreshProcess.waitFor(); - getProject().getLogger().quiet("Completed Token refresh"); - - if (resultCodeOfRefresh != 0) { - throw new RuntimeException("Failed to invoke kubectl to refresh tokens"); - } - - } catch (InterruptedException | IOException e) { - throw new RuntimeException(e); - } - - io.fabric8.kubernetes.client.Config config = new io.fabric8.kubernetes.client.ConfigBuilder() - .withConnectionTimeout(DEFAULT_K8S_TIMEOUT_VALUE_MILLIES) - .withRequestTimeout(DEFAULT_K8S_TIMEOUT_VALUE_MILLIES) - .withRollingTimeout(DEFAULT_K8S_TIMEOUT_VALUE_MILLIES) - .withWebsocketTimeout(DEFAULT_K8S_WEBSOCKET_TIMEOUT) - .withWebsocketPingInterval(DEFAULT_K8S_WEBSOCKET_TIMEOUT) - .build(); - - return new DefaultKubernetesClient(config); - } - - private static String rnd64Base36(Random rnd) { - return new BigInteger(64, rnd) - .toString(36) - .toLowerCase(); - } - - private CompletableFuture<KubePodResult> submitBuild( - String namespace, - int numberOfPods, - int podIdx, - String podName, - boolean printOutput, - int numberOfRetries - ) { - return CompletableFuture.supplyAsync(() -> { - PersistentVolumeClaim pvc = createPvc(podName); - return buildRunPodWithRetriesOrThrow(namespace, numberOfPods, podIdx, podName, printOutput, numberOfRetries, pvc); - }, executorService); - } - - private static void addShutdownHook(Runnable hook) { - Runtime.getRuntime().addShutdownHook(new Thread(hook)); - } - - private PersistentVolumeClaim createPvc(String name) { - PersistentVolumeClaim pvc; - try (KubernetesClient client = getKubernetesClient()) { - pvc = client.persistentVolumeClaims() - .inNamespace(NAMESPACE) - .createNew() - .editOrNewMetadata().withName(name).endMetadata() - .editOrNewSpec() - .withAccessModes("ReadWriteOnce") - .editOrNewResources().addToRequests("storage", new Quantity("100Mi")).endResources() - .withStorageClassName("testing-storage") - .endSpec() - .done(); - } - - addShutdownHook(() -> { - try (KubernetesClient client = getKubernetesClient()) { - System.out.println("Deleting PVC: " + pvc.getMetadata().getName()); - client.persistentVolumeClaims().delete(pvc); - } - }); - return pvc;
} - - private KubePodResult buildRunPodWithRetriesOrThrow( - String namespace, - int numberOfPods, - int podIdx, - String podName, - boolean printOutput, - int numberOfRetries, - PersistentVolumeClaim pvc) { - addShutdownHook(() -> { - System.out.println("deleting pod: " + podName); - try (KubernetesClient client = getKubernetesClient()) { - client.pods().inNamespace(namespace).withName(podName).delete(); - } - }); - - int podNumber = podIdx + 1; - final AtomicInteger testRetries = new AtomicInteger(0); - try { - // pods might die, so we retry - return Retry.fixed(numberOfRetries).run(() -> { - // remove pod if exists - Pod createdPod; - try (KubernetesClient client = getKubernetesClient()) { - PodResource<Pod, DoneablePod> oldPod = client.pods().inNamespace(namespace).withName(podName); - if (oldPod.get() != null) { - getLogger().lifecycle("deleting pod: {}", podName); - oldPod.delete(); - while (oldPod.get() != null) { - getLogger().info("waiting for pod {} to be removed", podName); - Thread.sleep(1000); - } - } - getProject().getLogger().lifecycle("creating pod: " + podName); - createdPod = client.pods().inNamespace(namespace).create(buildPodRequest(podName, pvc, sidecarImage != null)); - remainingPods.add(podName); - getProject().getLogger().lifecycle("scheduled pod: " + podName); - } - - attachStatusListenerToPod(createdPod); - waitForPodToStart(createdPod); - - PipedOutputStream stdOutOs = new PipedOutputStream(); - PipedInputStream stdOutIs = new PipedInputStream(4096); - ByteArrayOutputStream errChannelStream = new ByteArrayOutputStream(); - - CompletableFuture<Integer> waiter = new CompletableFuture<>(); - File podLogsDirectory = new File(getProject().getBuildDir(), "pod-logs"); - if (!podLogsDirectory.exists()) { - podLogsDirectory.mkdirs(); - } - - File podOutput = executeBuild(namespace, numberOfPods, podIdx, podName, podLogsDirectory, printOutput, stdOutOs, stdOutIs, errChannelStream, waiter); - int resCode = waiter.join(); - getProject().getLogger().lifecycle("build has ended on on pod " + podName + " (" + podNumber + "/" + numberOfPods + ") with result " + resCode + " , gathering results"); - Collection<File> binaryResults; - //we don't retry on the final attempt as this will crash the build and some pods might not get to finish - if (resCode != 0 && testRetries.getAndIncrement() < numberOfRetries - 1) { - downloadTestXmlFromPod(namespace, createdPod); - getProject().getLogger().lifecycle("There are test failures in this pod.
Retrying failed tests!!!"); - throw new RuntimeException("There are test failures in this pod"); - } else { - binaryResults = downloadTestXmlFromPod(namespace, createdPod); - } - - getLogger().lifecycle("removing pod " + podName + " (" + podNumber + "/" + numberOfPods + ") after completed build"); - - try (KubernetesClient client = getKubernetesClient()) { - client.pods().delete(createdPod); - client.persistentVolumeClaims().delete(pvc); - synchronized (remainingPods) { - remainingPods.remove(podName); - getLogger().lifecycle("Remaining Pods: "); - remainingPods.forEach(pod -> getLogger().lifecycle("\t" + pod)); - } - } - return new KubePodResult(podIdx, resCode, podOutput, binaryResults); - }); - } catch (Retry.RetryException e) { - Pod pod = getKubernetesClient().pods().inNamespace(namespace).create(buildPodRequest(podName, pvc, sidecarImage != null)); - downloadTestXmlFromPod(namespace, pod); - throw new RuntimeException("Failed to build in pod " + podName + " (" + podNumber + "/" + numberOfPods + ") in " + numberOfRetries + " attempts", e); - } - } - - @NotNull - private File executeBuild(String namespace, - int numberOfPods, - int podIdx, - String podName, - File podLogsDirectory, - boolean printOutput, - PipedOutputStream stdOutOs, - PipedInputStream stdOutIs, - ByteArrayOutputStream errChannelStream, - CompletableFuture<Integer> waiter) throws IOException { - KubernetesClient client = getKubernetesClient(); - ExecListener execListener = buildExecListenerForPod(podName, errChannelStream, waiter, client); - stdOutIs.connect(stdOutOs); - - String[] buildCommand = getBuildCommand(numberOfPods, podIdx); - getProject().getLogger().quiet("About to execute " + Arrays.stream(buildCommand).reduce("", (s, s2) -> s + " " + s2) + " on pod " + podName); - client.pods().inNamespace(namespace).withName(podName) - .inContainer(podName) - .writingOutput(stdOutOs) - .writingErrorChannel(errChannelStream) - .usingListener(execListener) - .exec(buildCommand); - - return startLogPumping(stdOutIs, podIdx, podLogsDirectory, printOutput); - } - - private Pod buildPodRequest(String podName, PersistentVolumeClaim pvc, boolean withDb) { - if (withDb) { - return buildPodRequestWithWorkerNodeAndDbContainer(podName, pvc); - } else { - return buildPodRequestWithOnlyWorkerNode(podName, pvc); - } - } - - private Pod buildPodRequestWithOnlyWorkerNode(String podName, PersistentVolumeClaim pvc) { - return getBasePodDefinition(podName, pvc) - .addToRequests("cpu", new Quantity(numberOfCoresPerFork.toString())) - .addToRequests("memory", new Quantity(memoryGbPerFork.toString())) - .endResources() - .addNewVolumeMount().withName("gradlecache").withMountPath("/tmp/gradle").endVolumeMount() - .addNewVolumeMount().withName("testruns").withMountPath(TEST_RUN_DIR).endVolumeMount() - .endContainer() - .addNewImagePullSecret(REGISTRY_CREDENTIALS_SECRET_NAME) - .withRestartPolicy("Never") - .endSpec() - .build(); - } - - private Pod buildPodRequestWithWorkerNodeAndDbContainer(String podName, PersistentVolumeClaim pvc) { - return getBasePodDefinition(podName, pvc) - .addToRequests("cpu", new Quantity(Integer.valueOf(numberOfCoresPerFork - 1).toString())) - .addToRequests("memory", new Quantity(Integer.valueOf(memoryGbPerFork - 1).toString() + "Gi")) - .endResources() - .addNewVolumeMount().withName("gradlecache").withMountPath("/tmp/gradle").endVolumeMount() - .addNewVolumeMount().withName("testruns").withMountPath(TEST_RUN_DIR).endVolumeMount() - .endContainer() - .addNewContainer() - .withImage(sidecarImage) - .addNewEnv() -
.withName("DRIVER_NODE_MEMORY") - .withValue("1024m") - .withName("DRIVER_WEB_MEMORY") - .withValue("1024m") - .endEnv() - .withName(podName + "-pg") - .withNewResources() - .addToRequests("cpu", new Quantity("1")) - .addToRequests("memory", new Quantity("1Gi")) - .endResources() - .endContainer() - - .addNewImagePullSecret(REGISTRY_CREDENTIALS_SECRET_NAME) - .withRestartPolicy("Never") - .endSpec() - .build(); - } - - private ContainerFluent.ResourcesNested<PodSpecFluent.ContainersNested<PodFluent.SpecNested<PodBuilder>>> getBasePodDefinition(String podName, PersistentVolumeClaim pvc) { - return new PodBuilder() - .withNewMetadata().withName(podName).endMetadata() - .withNewSpec() - - .addNewVolume() - .withName("gradlecache") - .withNewHostPath() - .withType("DirectoryOrCreate") - .withPath("/tmp/gradle") - .endHostPath() - .endVolume() - .addNewVolume() - .withName("testruns") - .withNewPersistentVolumeClaim() - .withClaimName(pvc.getMetadata().getName()) - .endPersistentVolumeClaim() - .endVolume() - .withTolerations(taints.stream().map(taint -> new TolerationBuilder().withKey("key").withValue(taint).withOperator("Equal").withEffect("NoSchedule").build()).collect(Collectors.toList())) - .addNewContainer() - .withImage(dockerTag) - .withCommand("bash") - .withArgs("-c", "sleep 3600") - .addNewEnv() - .withName("DRIVER_NODE_MEMORY") - .withValue("1024m") - .withName("DRIVER_WEB_MEMORY") - .withValue("1024m") - .endEnv() - .withName(podName) - .withNewResources(); - } - - - private File startLogPumping(InputStream stdOutIs, int podIdx, File podLogsDirectory, boolean printOutput) throws IOException { - File outputDir = new File(podLogsDirectory, taskToExecuteName); - outputDir.mkdirs(); - File outputFile = new File(outputDir, "container-" + podIdx + ".log"); - outputFile.createNewFile(); - Thread loggingThread = new Thread(() -> { - try (BufferedWriter out = new BufferedWriter(new FileWriter(outputFile, true)); - BufferedReader br = new BufferedReader(new InputStreamReader(stdOutIs))) { - String line; - while ((line = br.readLine()) != null) { - String toWrite = ("Container" + podIdx + ": " + line).trim(); - if (printOutput) { - getProject().getLogger().lifecycle(toWrite); - } - out.write(line); - out.newLine(); - } - } catch (IOException ignored) { - } - }); - - loggingThread.setDaemon(true); - loggingThread.start(); - return outputFile; - } - - private Watch attachStatusListenerToPod(Pod pod) { - KubernetesClient client = getKubernetesClient(); - return client.pods().inNamespace(pod.getMetadata().getNamespace()).withName(pod.getMetadata().getName()).watch(new Watcher<Pod>() { - @Override - public void eventReceived(Watcher.Action action, Pod resource) { - getProject().getLogger().lifecycle("[StatusChange] pod " + resource.getMetadata().getName() + " " + action.name() + " (" + resource.getStatus().getPhase() + ")"); - } - - @Override - public void onClose(KubernetesClientException cause) { - client.close(); - } - }); - } - - private void waitForPodToStart(Pod pod) { - try (KubernetesClient client = getKubernetesClient()) { - getProject().getLogger().lifecycle("Waiting for pod " + pod.getMetadata().getName() + " to start before executing build"); - try { - client.pods().inNamespace(pod.getMetadata().getNamespace()).withName(pod.getMetadata().getName()).waitUntilReady(DEFAULT_POD_ALLOCATION_TIMEOUT, TimeUnit.MINUTES); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - getProject().getLogger().lifecycle("pod " + pod.getMetadata().getName() + " has started, executing build"); - } - } - - private Collection<File> downloadTestXmlFromPod(String
namespace, Pod cp) { - String resultsInContainerPath = TEST_RUN_DIR + "/test-reports"; - String binaryResultsFile = "results.bin"; - String podName = cp.getMetadata().getName(); - Path tempDir = new File(new File(getProject().getBuildDir(), "test-results-xml"), podName).toPath(); - - if (!tempDir.toFile().exists()) { - tempDir.toFile().mkdirs(); - } - getProject().getLogger().lifecycle("Saving " + podName + " results to: " + tempDir.toAbsolutePath().toFile().getAbsolutePath()); - try (KubernetesClient client = getKubernetesClient()) { - client.pods() - .inNamespace(namespace) - .withName(podName) - .inContainer(podName) - .dir(resultsInContainerPath) - .copy(tempDir); - } - return findFolderContainingBinaryResultsFile(new File(tempDir.toFile().getAbsolutePath()), binaryResultsFile); - } - - private String[] getBuildCommand(int numberOfPods, int podIdx) { - final String gitBranch = " -Dgit.branch=" + Properties.getGitBranch(); - final String gitTargetBranch = " -Dgit.target.branch=" + Properties.getTargetGitBranch(); - final String artifactoryUsername = " -Dartifactory.username=" + Properties.getUsername() + " "; - final String artifactoryPassword = " -Dartifactory.password=" + Properties.getPassword() + " "; - final String additionalArgs = this.additionalArgs.isEmpty() ? "" : String.join(" ", this.additionalArgs); - - String shellScript = "(let x=1 ; while [ ${x} -ne 0 ] ; do echo \"Waiting for DNS\" ; curl services.gradle.org > /dev/null 2>&1 ; x=$? ; sleep 1 ; done ) && " - + " cd /tmp/source && " + - "(let y=1 ; while [ ${y} -ne 0 ] ; do echo \"Preparing build directory\" ; ./gradlew testClasses integrationTestClasses --parallel 2>&1 ; y=$? ; sleep 1 ; done ) && " + - "(./gradlew -D" + ListTests.DISTRIBUTION_PROPERTY + "=" + distribution.name() + - gitBranch + - gitTargetBranch + - artifactoryUsername + - artifactoryPassword + - "-Dkubenetize -PdockerFork=" + podIdx + " -PdockerForks=" + numberOfPods + " " + fullTaskToExecutePath + " " + additionalArgs + " " + getLoggingLevel() + " 2>&1) ; " + - "let rs=$? 
; sleep 10 ; exit ${rs}"; - return new String[]{"bash", "-c", shellScript}; - } - - private String getLoggingLevel() { - - switch (podLogLevel) { - case INFO: - return " --info"; - case WARN: - return " --warn"; - case QUIET: - return " --quiet"; - case DEBUG: - return " --debug"; - default: - throw new IllegalArgumentException("LogLevel: " + podLogLevel + " is unknown"); - } - - } - - private List findFolderContainingBinaryResultsFile(File start, String fileNameToFind) { - Queue filesToInspect = new LinkedList<>(Collections.singletonList(start)); - List folders = new ArrayList<>(); - while (!filesToInspect.isEmpty()) { - File fileToInspect = filesToInspect.poll(); - if (fileToInspect.getAbsolutePath().endsWith(fileNameToFind)) { - folders.add(fileToInspect.getParentFile()); - } - - if (fileToInspect.isDirectory()) { - filesToInspect.addAll(Arrays.stream(Optional.ofNullable(fileToInspect.listFiles()).orElse(new File[]{})).collect(Collectors.toList())); - } - } - return folders; - } - - private ExecListener buildExecListenerForPod(String podName, ByteArrayOutputStream errChannelStream, CompletableFuture waitingFuture, KubernetesClient client) { - - return new ExecListener() { - final Long start = System.currentTimeMillis(); - - @Override - public void onOpen(Response response) { - getProject().getLogger().lifecycle("Build started on pod " + podName); - } - - @Override - public void onFailure(Throwable t, Response response) { - getProject().getLogger().lifecycle("Received error from pod " + podName); - waitingFuture.completeExceptionally(t); - } - - @Override - public void onClose(int code, String reason) { - getProject().getLogger().lifecycle("Received onClose() from pod " + podName + " , build took: " + ((System.currentTimeMillis() - start) / 1000) + " seconds"); - try { - String errChannelContents = errChannelStream.toString(); - Status status = Serialization.unmarshal(errChannelContents, Status.class); - Integer resultCode = Optional.ofNullable(status).map(Status::getDetails) - .map(StatusDetails::getCauses) - .flatMap(c -> c.stream().findFirst()) - .map(StatusCause::getMessage) - .map(Integer::parseInt).orElse(0); - waitingFuture.complete(resultCode); - } catch (Exception e) { - waitingFuture.completeExceptionally(e); - } finally { - client.close(); - } - } - }; - } - -} diff --git a/buildSrc/src/main/groovy/net/corda/testing/ListShufflerAndAllocator.java b/buildSrc/src/main/groovy/net/corda/testing/ListShufflerAndAllocator.java deleted file mode 100644 index 6b25e3242c..0000000000 --- a/buildSrc/src/main/groovy/net/corda/testing/ListShufflerAndAllocator.java +++ /dev/null @@ -1,37 +0,0 @@ -package net.corda.testing; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Objects; -import java.util.Random; -import java.util.stream.Collectors; - -class ListShufflerAndAllocator { - - private final List tests; - - public ListShufflerAndAllocator(List tests) { - this.tests = new ArrayList<>(tests); - } - - public List getTestsForFork(int fork, int forks, Integer seed) { - final Random shuffler = new Random(seed); - final List copy = new ArrayList<>(tests); - while (copy.size() < forks) { - //pad the list - copy.add(null); - } - Collections.shuffle(copy, shuffler); - final int numberOfTestsPerFork = Math.max((copy.size() / forks), 1); - final int consumedTests = numberOfTestsPerFork * forks; - final int ourStartIdx = numberOfTestsPerFork * fork; - final int ourEndIdx = ourStartIdx + numberOfTestsPerFork; - final int ourSupplementaryIdx = 
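// Allocation step: each fork takes a contiguous slice of size max(tests/forks, 1)
// from the shuffled, null-padded copy, plus at most one leftover test at index
// consumedTests + fork. Worked example: 10 tests over 4 forks gives slices of 2
// covering indices 0..7, forks 0 and 1 take the leftovers at indices 8 and 9, and
// the padding nulls are filtered out before the list is returned.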
consumedTests + fork; - final ArrayList toReturn = new ArrayList<>(copy.subList(ourStartIdx, ourEndIdx)); - if (ourSupplementaryIdx < copy.size()) { - toReturn.add(copy.get(ourSupplementaryIdx)); - } - return toReturn.stream().filter(Objects::nonNull).collect(Collectors.toList()); - } -} \ No newline at end of file diff --git a/buildSrc/src/main/groovy/net/corda/testing/ListTests.java b/buildSrc/src/main/groovy/net/corda/testing/ListTests.java deleted file mode 100644 index 7c7e2804c4..0000000000 --- a/buildSrc/src/main/groovy/net/corda/testing/ListTests.java +++ /dev/null @@ -1,89 +0,0 @@ -package net.corda.testing; - -import io.github.classgraph.ClassGraph; -import io.github.classgraph.ClassInfo; -import io.github.classgraph.ClassInfoList; -import org.gradle.api.DefaultTask; -import org.gradle.api.file.FileCollection; -import org.gradle.api.tasks.TaskAction; -import org.jetbrains.annotations.NotNull; - -import java.math.BigInteger; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.function.Function; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -interface TestLister { - List getAllTestsDiscovered(); -} - -public class ListTests extends DefaultTask implements TestLister { - - public static final String DISTRIBUTION_PROPERTY = "distributeBy"; - - public FileCollection scanClassPath; - private List allTests; - private DistributeTestsBy distribution = System.getProperty(DISTRIBUTION_PROPERTY) != null && !System.getProperty(DISTRIBUTION_PROPERTY).isEmpty() ? - DistributeTestsBy.valueOf(System.getProperty(DISTRIBUTION_PROPERTY)) : DistributeTestsBy.METHOD; - - public List getTestsForFork(int fork, int forks, Integer seed) { - BigInteger gitSha = new BigInteger(getProject().hasProperty("corda_revision") ? - getProject().property("corda_revision").toString() : "0", 36); - if (fork >= forks) { - throw new IllegalArgumentException("requested shard ${fork + 1} for total shards ${forks}"); - } - int seedToUse = seed != null ? (seed + (this.getPath()).hashCode() + gitSha.intValue()) : 0; - return new ListShufflerAndAllocator(allTests).getTestsForFork(fork, forks, seedToUse); - } - - @Override - public List getAllTestsDiscovered() { - return new ArrayList<>(allTests); - } - - @TaskAction - void discoverTests() { - Collection results; - switch (distribution) { - case METHOD: - results = getClassGraphStreamOfTestClasses() - .map(classInfo -> classInfo.getMethodInfo().filter(methodInfo -> methodInfo.hasAnnotation("org.junit.Test")) - .stream().map(methodInfo -> classInfo.getName() + "." 
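// METHOD distribution identifies tests as "fully.qualified.ClassName.methodName",
// the same prefix shape that Tests.startsWith() later matches when recorded
// durations are looked up for bucketing.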
+ methodInfo.getName())) - .flatMap(Function.identity()) - .collect(Collectors.toSet()); - - this.allTests = results.stream().sorted().collect(Collectors.toList()); - break; - case CLASS: - results = getClassGraphStreamOfTestClasses() - .map(ClassInfo::getName) - .collect(Collectors.toSet()); - this.allTests = results.stream().sorted().collect(Collectors.toList()); - break; - } - } - - @NotNull - private Stream getClassGraphStreamOfTestClasses() { - return new ClassGraph() - .enableClassInfo() - .enableMethodInfo() - .ignoreClassVisibility() - .ignoreMethodVisibility() - .enableAnnotationInfo() - .overrideClasspath(scanClassPath) - .scan() - .getClassesWithMethodAnnotation("org.junit.Test") - .stream() - .map(classInfo -> { - ClassInfoList returnList = new ClassInfoList(); - returnList.add(classInfo); - returnList.addAll(classInfo.getSubclasses()); - return returnList; - }) - .flatMap(ClassInfoList::stream); - } -} \ No newline at end of file diff --git a/buildSrc/src/main/groovy/net/corda/testing/ParallelTestGroup.java b/buildSrc/src/main/groovy/net/corda/testing/ParallelTestGroup.java deleted file mode 100644 index 6b8ac65638..0000000000 --- a/buildSrc/src/main/groovy/net/corda/testing/ParallelTestGroup.java +++ /dev/null @@ -1,115 +0,0 @@ -package net.corda.testing; - -import org.gradle.api.DefaultTask; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -public class ParallelTestGroup extends DefaultTask { - - private DistributeTestsBy distribution = DistributeTestsBy.METHOD; - private List groups = new ArrayList<>(); - private int shardCount = 20; - private int coresToUse = 4; - private int gbOfMemory = 4; - private boolean printToStdOut = true; - private PodLogLevel logLevel = PodLogLevel.INFO; - private String sidecarImage; - private List additionalArgs = new ArrayList<>(); - private List taints = new ArrayList<>(); - - public DistributeTestsBy getDistribution() { - return distribution; - } - - public List getGroups() { - return groups; - } - - public int getShardCount() { - return shardCount; - } - - public int getCoresToUse() { - return coresToUse; - } - - public int getGbOfMemory() { - return gbOfMemory; - } - - public boolean getPrintToStdOut() { - return printToStdOut; - } - - public PodLogLevel getLogLevel() { - return logLevel; - } - - public String getSidecarImage() { - return sidecarImage; - } - - public List getAdditionalArgs() { - return additionalArgs; - } - - public List getNodeTaints(){ - return new ArrayList<>(taints); - } - - public void numberOfShards(int shards) { - this.shardCount = shards; - } - - public void podLogLevel(PodLogLevel level) { - this.logLevel = level; - } - - public void distribute(DistributeTestsBy dist) { - this.distribution = dist; - } - - public void coresPerFork(int cores) { - this.coresToUse = cores; - } - - public void memoryInGbPerFork(int gb) { - this.gbOfMemory = gb; - } - - //when this is false, only containers will "failed" exit codes will be printed to stdout - public void streamOutput(boolean print) { - this.printToStdOut = print; - } - - public void testGroups(String... group) { - testGroups(Arrays.asList(group)); - } - - private void testGroups(List group) { - groups.addAll(group); - } - - public void sidecarImage(String sidecarImage) { - this.sidecarImage = sidecarImage; - } - - public void additionalArgs(String... 
additionalArgs) { - additionalArgs(Arrays.asList(additionalArgs)); - } - - private void additionalArgs(List additionalArgs) { - this.additionalArgs.addAll(additionalArgs); - } - - public void nodeTaints(String... additionalArgs) { - nodeTaints(Arrays.asList(additionalArgs)); - } - - private void nodeTaints(List additionalArgs) { - this.taints.addAll(additionalArgs); - } - -} diff --git a/buildSrc/src/main/groovy/net/corda/testing/PodAllocator.java b/buildSrc/src/main/groovy/net/corda/testing/PodAllocator.java deleted file mode 100644 index d88fc93cd1..0000000000 --- a/buildSrc/src/main/groovy/net/corda/testing/PodAllocator.java +++ /dev/null @@ -1,147 +0,0 @@ -package net.corda.testing; - -import io.fabric8.kubernetes.api.model.Quantity; -import io.fabric8.kubernetes.api.model.TolerationBuilder; -import io.fabric8.kubernetes.api.model.batch.Job; -import io.fabric8.kubernetes.api.model.batch.JobBuilder; -import io.fabric8.kubernetes.client.Config; -import io.fabric8.kubernetes.client.ConfigBuilder; -import io.fabric8.kubernetes.client.DefaultKubernetesClient; -import io.fabric8.kubernetes.client.KubernetesClient; -import io.fabric8.kubernetes.client.KubernetesClientException; -import io.fabric8.kubernetes.client.Watch; -import io.fabric8.kubernetes.client.Watcher; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Comparator; -import java.util.List; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.TimeoutException; -import java.util.stream.Collectors; -import java.util.stream.IntStream; -import java.util.stream.Stream; - -public class PodAllocator { - - private static final int CONNECTION_TIMEOUT = 60_1000; - private final Logger logger; - - public PodAllocator(Logger logger) { - this.logger = logger; - } - - public PodAllocator() { - this.logger = LoggerFactory.getLogger(PodAllocator.class); - } - - public void allocatePods(Integer number, - Integer coresPerPod, - Integer memoryPerPod, - String prefix, - List taints) { - - Config config = getConfig(); - KubernetesClient client = new DefaultKubernetesClient(config); - - List podsToRequest = IntStream.range(0, number).mapToObj(i -> buildJob("pa-" + prefix + i, coresPerPod, memoryPerPod, taints)).collect(Collectors.toList()); - List createdJobs = podsToRequest.stream().map(requestedJob -> { - String msg = "PreAllocating " + requestedJob.getMetadata().getName(); - if (logger instanceof org.gradle.api.logging.Logger) { - ((org.gradle.api.logging.Logger) logger).quiet(msg); - } else { - logger.info(msg); - } - return client.batch().jobs().inNamespace(KubesTest.NAMESPACE).create(requestedJob); - }).collect(Collectors.toList()); - - Runtime.getRuntime().addShutdownHook(new Thread(() -> { - KubernetesClient tearDownClient = new DefaultKubernetesClient(getConfig()); - tearDownClient.batch().jobs().delete(createdJobs); - })); - } - - private Config getConfig() { - return new ConfigBuilder() - .withConnectionTimeout(CONNECTION_TIMEOUT) - .withRequestTimeout(CONNECTION_TIMEOUT) - .withRollingTimeout(CONNECTION_TIMEOUT) - .withWebsocketTimeout(CONNECTION_TIMEOUT) - .withWebsocketPingInterval(CONNECTION_TIMEOUT) - .build(); - } - - public void tearDownPods(String prefix) { - io.fabric8.kubernetes.client.Config config = getConfig(); - KubernetesClient client = new DefaultKubernetesClient(config); - Stream jobsToDelete = client.batch().jobs().inNamespace(KubesTest.NAMESPACE).list() - .getItems() - .stream() - 
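// Matching is by name substring: allocatePods() names its jobs "pa-" + prefix + index,
// so tearDownPods() must be called with the same prefix or the pre-allocated jobs are
// left to run out their 300-second sleep.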
.sorted(Comparator.comparing(p -> p.getMetadata().getName())) - .filter(foundPod -> foundPod.getMetadata().getName().contains(prefix)); - - List> deleteFutures = jobsToDelete.map(job -> { - CompletableFuture result = new CompletableFuture<>(); - Watch watch = client.batch().jobs().inNamespace(job.getMetadata().getNamespace()).withName(job.getMetadata().getName()).watch(new Watcher() { - @Override - public void eventReceived(Action action, Job resource) { - if (action == Action.DELETED) { - result.complete(resource); - String msg = "Successfully deleted job " + job.getMetadata().getName(); - logger.info(msg); - } - } - - @Override - public void onClose(KubernetesClientException cause) { - String message = "Failed to delete job " + job.getMetadata().getName(); - if (logger instanceof org.gradle.api.logging.Logger) { - ((org.gradle.api.logging.Logger) logger).error(message); - } else { - logger.info(message); - } - result.completeExceptionally(cause); - } - }); - client.batch().jobs().delete(job); - return result; - }).collect(Collectors.toList()); - - try { - CompletableFuture.allOf(deleteFutures.toArray(new CompletableFuture[0])).get(5, TimeUnit.MINUTES); - } catch (InterruptedException | ExecutionException | TimeoutException e) { - //ignore - there's nothing left to do - } - } - - - Job buildJob(String podName, Integer coresPerPod, Integer memoryPerPod, List taints) { - return new JobBuilder().withNewMetadata().withName(podName).endMetadata() - .withNewSpec() - .withTtlSecondsAfterFinished(10) - .withNewTemplate() - .withNewMetadata() - .withName(podName + "-pod") - .endMetadata() - .withNewSpec() - .withTolerations(taints.stream().map(taint -> new TolerationBuilder().withKey("key").withValue(taint).withOperator("Equal").withEffect("NoSchedule").build()).collect(Collectors.toList())) - .addNewContainer() - .withImage("busybox:latest") - .withCommand("sh") - .withArgs("-c", "sleep 300") - .withName(podName) - .withNewResources() - .addToRequests("cpu", new Quantity(coresPerPod.toString())) - .addToRequests("memory", new Quantity(memoryPerPod.toString() + "Gi")) - .endResources() - .endContainer() - .withRestartPolicy("Never") - .endSpec() - .endTemplate() - .endSpec() - .build(); - } - -} diff --git a/buildSrc/src/main/groovy/net/corda/testing/PodLogLevel.java b/buildSrc/src/main/groovy/net/corda/testing/PodLogLevel.java deleted file mode 100644 index 27ce56f9f2..0000000000 --- a/buildSrc/src/main/groovy/net/corda/testing/PodLogLevel.java +++ /dev/null @@ -1,5 +0,0 @@ -package net.corda.testing; - -public enum PodLogLevel { - QUIET, WARN, INFO, DEBUG -} \ No newline at end of file diff --git a/buildSrc/src/main/groovy/net/corda/testing/Properties.java b/buildSrc/src/main/groovy/net/corda/testing/Properties.java deleted file mode 100644 index b8813e787f..0000000000 --- a/buildSrc/src/main/groovy/net/corda/testing/Properties.java +++ /dev/null @@ -1,91 +0,0 @@ -package net.corda.testing; - -import org.jetbrains.annotations.NotNull; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * A single class to hold some of the properties we need to get from the command line - * in order to store test results in Artifactory. - */ -public class Properties { - private static final Logger LOG = LoggerFactory.getLogger(Properties.class); - - private static String ROOT_PROJECT_TYPE = "corda"; // corda or enterprise - - /** - * Get the Corda type. Used in the tag names when we store in Artifactory. 
- * - * @return either 'corda' or 'enterprise' - */ - static String getRootProjectType() { - return ROOT_PROJECT_TYPE; - } - - /** - * Set the Corda (repo) type - either enterprise, or corda (open-source). - * Used in the tag names when we store in Artifactory. - * - * @param rootProjectType the corda repo type. - */ - static void setRootProjectType(@NotNull final String rootProjectType) { - ROOT_PROJECT_TYPE = rootProjectType; - } - - /** - * Get property with logging - * - * @param key property to get - * @return empty string, or trimmed value - */ - @NotNull - static String getProperty(@NotNull final String key) { - final String value = System.getProperty(key, "").trim(); - if (value.isEmpty()) { - LOG.debug("Property '{}' not set", key); - } else { - LOG.debug("Ok. Property '{}' is set", key); - } - return value; - } - - /** - * Get Artifactory username - * - * @return the username - */ - static String getUsername() { - return getProperty("artifactory.username"); - } - - /** - * Get Artifactory password - * - * @return the password - */ - static String getPassword() { - return getProperty("artifactory.password"); - } - - /** - * The current branch/tag - * - * @return the current branch - */ - @NotNull - static String getGitBranch() { - return getProperty("git.branch").replace('/', '-'); - } - - /** - * @return the branch that this branch was likely checked out from. - */ - @NotNull - static String getTargetGitBranch() { - return getProperty("git.target.branch").replace('/', '-'); - } - - static boolean getPublishJunitTests() { - return ! getProperty("publish.junit").isEmpty(); - } -} diff --git a/buildSrc/src/main/groovy/net/corda/testing/TestDurationArtifacts.java b/buildSrc/src/main/groovy/net/corda/testing/TestDurationArtifacts.java deleted file mode 100644 index c29809c4e9..0000000000 --- a/buildSrc/src/main/groovy/net/corda/testing/TestDurationArtifacts.java +++ /dev/null @@ -1,430 +0,0 @@ -package net.corda.testing; - -import groovy.lang.Tuple2; -import org.apache.commons.compress.archivers.ArchiveEntry; -import org.apache.commons.compress.archivers.ArchiveException; -import org.apache.commons.compress.archivers.ArchiveInputStream; -import org.apache.commons.compress.archivers.ArchiveStreamFactory; -import org.apache.commons.compress.utils.IOUtils; -import org.gradle.api.Project; -import org.gradle.api.Task; -import org.gradle.api.tasks.bundling.Zip; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.w3c.dom.Document; -import org.w3c.dom.NamedNodeMap; -import org.w3c.dom.Node; -import org.w3c.dom.NodeList; -import org.xml.sax.SAXException; - -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.parsers.ParserConfigurationException; -import javax.xml.xpath.XPath; -import javax.xml.xpath.XPathConstants; -import javax.xml.xpath.XPathExpression; -import javax.xml.xpath.XPathExpressionException; -import javax.xml.xpath.XPathFactory; -import java.io.BufferedInputStream; -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.FileWriter; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.OutputStream; -import java.nio.file.FileSystems; -import java.nio.file.FileVisitResult; -import java.nio.file.FileVisitor; -import java.nio.file.Files; 
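// Illustrative summary of the round trip implemented below (the column names are the
// constants defined in Tests.java; the sample row is hypothetical):
//   1. loadTests() downloads the tests-durations zip for this branch tag and parses the csv inside;
//   2. junit xml files from the current run are folded into the in-memory Tests object;
//   3. createCsvTask()/createCsvZipAndUploadTask() write and upload a csv shaped like:
//        Test Name,Mean Duration Nanos,Number of runs
//        com.example.FooTests.works,1200000000,3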
-import java.nio.file.Path; -import java.nio.file.PathMatcher; -import java.nio.file.attribute.BasicFileAttributes; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.function.BiFunction; -import java.util.function.Supplier; - -/** - * Get or put test artifacts to/from a REST endpoint. The expected format is a zip file of junit XML files. - * See https://www.jfrog.com/confluence/display/RTF/Artifactory+REST+API - */ -public class TestDurationArtifacts { - private static final String EXTENSION = "zip"; - private static final String BASE_URL = "https://software.r3.com/artifactory/corda-test-results/net/corda"; - private static final Logger LOG = LoggerFactory.getLogger(TestDurationArtifacts.class); - private static final String ARTIFACT = "tests-durations"; - // The one and only set of tests information. We load these at the start of a build, and update them and save them at the end. - static Tests tests = new Tests(); - - // Artifactory API - private final Artifactory artifactory = new Artifactory(); - - /** - * Write out the test durations as a CSV file. - * Reload the tests from artifactory and update with the latest run. - * - * @param project project that we are attaching the test to. - * @param name basename for the test. - * @return the csv task - */ - private static Task createCsvTask(@NotNull final Project project, @NotNull final String name) { - return project.getTasks().create("createCsvFromXmlFor" + capitalize(name), Task.class, t -> { - t.setGroup(DistributedTesting.GRADLE_GROUP); - t.setDescription("Create csv from all discovered junit xml files"); - - // Parse all the junit results and write them to a csv file. - t.doFirst(task -> { - project.getLogger().warn("About to create CSV file and zip it"); - - // Reload the test object from artifactory - loadTests(); - // Get the list of junit xml artifacts - final List testXmlFiles = getTestXmlFiles(project.getBuildDir().getAbsoluteFile().toPath()); - project.getLogger().warn("Found {} xml junit files", testXmlFiles.size()); - - // Read test xml files for tests and duration and add them to the `Tests` object - // This adjusts the runCount and over all average duration for existing tests. - for (Path testResult : testXmlFiles) { - try { - final List> unitTests = fromJunitXml(new FileInputStream(testResult.toFile())); - - // Add the non-zero duration tests to build up an average. - unitTests.stream() - .filter(t2 -> t2.getSecond() > 0L) - .forEach(unitTest -> tests.addDuration(unitTest.getFirst(), unitTest.getSecond())); - - final long meanDurationForTests = tests.getMeanDurationForTests(); - - // Add the zero duration tests using the mean value so they are fairly distributed over the pods in the next run. - // If we used 'zero' they would all be added to the smallest bucket. - unitTests.stream() - .filter(t2 -> t2.getSecond() <= 0L) - .forEach(unitTest -> tests.addDuration(unitTest.getFirst(), meanDurationForTests)); - - } catch (FileNotFoundException ignored) { - } - } - - // Write the test file to disk. 
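// NOTE: the FileWriter below is never closed; the csv only reaches disk because
// Tests.write() flushes its CSVPrinter. A try-with-resources form would be safer
// (a sketch, not the original code):
//     try (FileWriter writer = new FileWriter(new File(project.getRootDir(), ARTIFACT + ".csv"))) {
//         tests.write(writer);
//     }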
- try { - final FileWriter writer = new FileWriter(new File(project.getRootDir(), ARTIFACT + ".csv")); - tests.write(writer); - LOG.warn("Written tests csv file with {} tests", tests.size()); - } catch (IOException ignored) { - } - }); - }); - } - - @NotNull - static String capitalize(@NotNull final String str) { - return str.substring(0, 1).toUpperCase() + str.substring(1); // groovy has this as an extension method - } - - /** - * Discover junit xml files, zip them, and upload to artifactory. - * - * @param project root project - * @param name task name that we're 'extending' - * @return gradle task - */ - @NotNull - private static Task createJunitZipTask(@NotNull final Project project, @NotNull final String name) { - return project.getTasks().create("zipJunitXmlFilesAndUploadFor" + capitalize(name), Zip.class, z -> { - z.setGroup(DistributedTesting.GRADLE_GROUP); - z.setDescription("Zip junit files and upload to artifactory"); - - z.getArchiveFileName().set(Artifactory.getFileName("junit", EXTENSION, getBranchTag())); - z.getDestinationDirectory().set(project.getRootDir()); - z.setIncludeEmptyDirs(false); - z.from(project.getRootDir(), task -> task.include("**/build/test-results-xml/**/*.xml", "**/build/test-results/**/*.xml")); - z.doLast(task -> { - try (FileInputStream inputStream = new FileInputStream(new File(z.getArchiveFileName().get()))) { - new Artifactory().put(BASE_URL, getBranchTag(), "junit", EXTENSION, inputStream); - } catch (Exception ignored) { - } - }); - }); - } - - /** - * Zip and upload test-duration csv files to artifactory - * - * @param project root project that we're attaching the task to - * @param name the task name we're 'extending' - * @return gradle task - */ - @NotNull - private static Task createCsvZipAndUploadTask(@NotNull final Project project, @NotNull final String name) { - return project.getTasks().create("zipCsvFilesAndUploadFor" + capitalize(name), Zip.class, z -> { - z.setGroup(DistributedTesting.GRADLE_GROUP); - z.setDescription("Zips test duration csv and uploads to artifactory"); - - z.getArchiveFileName().set(Artifactory.getFileName(ARTIFACT, EXTENSION, getBranchTag())); - z.getDestinationDirectory().set(project.getRootDir()); - z.setIncludeEmptyDirs(false); - - // There's only one csv, but glob it anyway. - z.from(project.getRootDir(), task -> task.include("**/" + ARTIFACT + ".csv")); - - // ...base class method zips the CSV... - - z.doLast(task -> { - // We've now created the one csv file containing the tests and their mean durations, - // this task has zipped it, so we now just upload it. - project.getLogger().warn("SAVING tests"); - project.getLogger().warn("Attempting to upload {}", z.getArchiveFileName().get()); - try (FileInputStream inputStream = new FileInputStream(new File(z.getArchiveFileName().get()))) { - if (!new TestDurationArtifacts().put(getBranchTag(), inputStream)) { - project.getLogger().warn("Could not upload zip of tests"); - } else { - project.getLogger().warn("SAVED tests"); - } - } catch (Exception e) { - project.getLogger().warn("Problem trying to upload: {} {}", z.getArchiveFileName().get(), e.toString()); - } - }); - }); - } - - /** - * Create the Gradle Zip task to gather test information - * - * @param project project to attach this task to - * @param name name of the task - * @param task a task that we depend on when creating the csv so Gradle produces the correct task graph. - * @return a reference to the created task. 
- */ - @NotNull - public static Task createZipTask(@NotNull final Project project, @NotNull final String name, @Nullable final Task task) { - final Task csvTask = createCsvTask(project, name); - - if (Properties.getPublishJunitTests()) { - final Task zipJunitTask = createJunitZipTask(project, name); - csvTask.dependsOn(zipJunitTask); - } - - if (task != null) { - csvTask.dependsOn(task); - } - final Task zipCsvTask = createCsvZipAndUploadTask(project, name); - zipCsvTask.dependsOn(csvTask); // we have to create the csv before we can zip it. - - return zipCsvTask; - } - - static List getTestXmlFiles(@NotNull final Path rootDir) { - List paths = new ArrayList<>(); - List matchers = new ArrayList<>(); - matchers.add(FileSystems.getDefault().getPathMatcher("glob:**/test-results-xml/**/*.xml")); - try { - Files.walkFileTree(rootDir, new FileVisitor() { - @Override - public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) { - return FileVisitResult.CONTINUE; - } - - @Override - public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) { - for (PathMatcher matcher : matchers) { - if (matcher.matches(file)) { - paths.add(file); - break; - } - } - - return FileVisitResult.CONTINUE; - } - - @Override - public FileVisitResult visitFileFailed(Path file, IOException exc) { - return FileVisitResult.CONTINUE; - } - - @Override - public FileVisitResult postVisitDirectory(Path dir, IOException exc) { - return FileVisitResult.CONTINUE; - } - }); - } catch (IOException e) { - LOG.warn("Could not walk tree and get all test xml files: {}", e.toString()); - } - return paths; - } - - /** - * Unzip test results in memory and return test names and durations. - * Assumes the input stream contains only csv files of the correct format. - * - * @param tests reference to the Tests object to be populated. - * @param zippedInputStream stream containing zipped result file(s) - */ - static void addTestsFromZippedCsv(@NotNull final Tests tests, - @NotNull final InputStream zippedInputStream) { - // We need this because ArchiveStream requires the `mark` functionality which is supported in buffered streams. - final BufferedInputStream bufferedInputStream = new BufferedInputStream(zippedInputStream); - try (ArchiveInputStream archiveInputStream = new ArchiveStreamFactory().createArchiveInputStream(bufferedInputStream)) { - ArchiveEntry e; - while ((e = archiveInputStream.getNextEntry()) != null) { - if (e.isDirectory()) continue; - - // We seem to need to take a copy of the original input stream (as positioned by the ArchiveEntry), because - // the XML parsing closes the stream after it has finished. This has the side effect of only parsing the first - // entry in the archive. - ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); - IOUtils.copy(archiveInputStream, outputStream); - ByteArrayInputStream byteInputStream = new ByteArrayInputStream(outputStream.toByteArray()); - - // Read the tests from the (csv) stream - final InputStreamReader reader = new InputStreamReader(byteInputStream); - - // Add the tests to the Tests object - tests.addTests(Tests.read(reader)); - } - } catch (ArchiveException | IOException e) { - LOG.warn("Problem unzipping XML test results"); - } - - LOG.debug("Discovered {} tests", tests.size()); - } - - /** - * For a given stream, return the testcase names and durations. - *
<p>
- * NOTE: the input stream will be closed by this method. - * - * @param inputStream an InputStream, closed once parsed - * @return a list of test names and their durations in nanos. - */ - @NotNull - static List> fromJunitXml(@NotNull final InputStream inputStream) { - final DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance(); - final List> results = new ArrayList<>(); - - try { - final DocumentBuilder builder = dbFactory.newDocumentBuilder(); - final Document document = builder.parse(inputStream); - document.getDocumentElement().normalize(); - final XPathFactory xPathfactory = XPathFactory.newInstance(); - final XPath xpath = xPathfactory.newXPath(); - final XPathExpression expression = xpath.compile("//testcase"); - final NodeList nodeList = (NodeList) expression.evaluate(document, XPathConstants.NODESET); - - final BiFunction get = - (a, k) -> a.getNamedItem(k) != null ? a.getNamedItem(k).getNodeValue() : ""; - - for (int i = 0; i < nodeList.getLength(); i++) { - final Node item = nodeList.item(i); - final NamedNodeMap attributes = item.getAttributes(); - final String testName = get.apply(attributes, "name"); - final String testDuration = get.apply(attributes, "time"); - final String testClassName = get.apply(attributes, "classname"); - - // If the test doesn't have a duration (it should), we return zero. - if (!(testName.isEmpty() || testClassName.isEmpty())) { - final long nanos = !testDuration.isEmpty() ? (long) (Double.parseDouble(testDuration) * 1_000_000_000.0) : 0L; - results.add(new Tuple2<>(testClassName + "." + testName, nanos)); - } else { - LOG.warn("Bad test in junit xml: name={} className={}", testName, testClassName); - } - } - } catch (ParserConfigurationException | IOException | XPathExpressionException | SAXException e) { - return Collections.emptyList(); - } - - return results; - } - - /** - * A supplier of tests. - *
<p>
- * We get them from Artifactory and then parse the test xml files to get the duration. - * - * @return a supplier of test results - */ - @NotNull - static Supplier getTestsSupplier() { - return TestDurationArtifacts::loadTests; - } - - /** - * we need to prepend the project type so that we have a unique tag for artifactory - * - * @return - */ - static String getBranchTag() { - return (Properties.getRootProjectType() + "-" + Properties.getGitBranch()).replace('.', '-'); - } - - /** - * we need to prepend the project type so that we have a unique tag artifactory - * - * @return - */ - static String getTargetBranchTag() { - return (Properties.getRootProjectType() + "-" + Properties.getTargetGitBranch()).replace('.', '-'); - } - - /** - * Load the tests from Artifactory, in-memory. No temp file used. Existing test data is cleared. - * - * @return a reference to the loaded tests. - */ - static Tests loadTests() { - LOG.warn("LOADING previous test runs from Artifactory"); - tests.clear(); - try { - final TestDurationArtifacts testArtifacts = new TestDurationArtifacts(); - ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); - - // Try getting artifacts for our branch, if not, try the target branch. - if (!testArtifacts.get(getBranchTag(), outputStream)) { - outputStream = new ByteArrayOutputStream(); - LOG.warn("Could not get tests from Artifactory for tag {}, trying {}", getBranchTag(), getTargetBranchTag()); - if (!testArtifacts.get(getTargetBranchTag(), outputStream)) { - LOG.warn("Could not get any tests from Artifactory"); - return tests; - } - } - - ByteArrayInputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray()); - addTestsFromZippedCsv(tests, inputStream); - LOG.warn("Got {} tests from Artifactory", tests.size()); - return tests; - } catch (Exception e) { // was IOException - LOG.warn(e.toString()); - LOG.warn("Could not get tests from Artifactory"); - return tests; - } - } - - /** - * Get tests for the specified tag in the outputStream - * - * @param theTag tag for tests - * @param outputStream stream of zipped xml files - * @return false if we fail to get the tests - */ - private boolean get(@NotNull final String theTag, @NotNull final OutputStream outputStream) { - return artifactory.get(BASE_URL, theTag, ARTIFACT, "zip", outputStream); - } - - /** - * Upload the supplied tests - * - * @param theTag tag for tests - * @param inputStream stream of zipped xml files. 
- * @return true if we succeed - */ - private boolean put(@NotNull final String theTag, @NotNull final InputStream inputStream) { - return artifactory.put(BASE_URL, theTag, ARTIFACT, EXTENSION, inputStream); - } -} - diff --git a/buildSrc/src/main/groovy/net/corda/testing/Tests.java b/buildSrc/src/main/groovy/net/corda/testing/Tests.java deleted file mode 100644 index 26e01e1db0..0000000000 --- a/buildSrc/src/main/groovy/net/corda/testing/Tests.java +++ /dev/null @@ -1,213 +0,0 @@ -package net.corda.testing; - -import groovy.lang.Tuple2; -import groovy.lang.Tuple3; -import org.apache.commons.csv.CSVFormat; -import org.apache.commons.csv.CSVPrinter; -import org.apache.commons.csv.CSVRecord; -import org.jetbrains.annotations.NotNull; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.io.Reader; -import java.io.Writer; -import java.util.Arrays; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.stream.Collectors; - -public class Tests { - final static String TEST_NAME = "Test Name"; - final static String MEAN_DURATION_NANOS = "Mean Duration Nanos"; - final static String NUMBER_OF_RUNS = "Number of runs"; - private static final Logger LOG = LoggerFactory.getLogger(Tests.class); - // test name -> (mean duration, number of runs) - private final Map> tests = new HashMap<>(); - // If we don't have any tests from which to get a mean, use this. - static long DEFAULT_MEAN_NANOS = 1000L; - - private static Tuple2 DEFAULT_MEAN_TUPLE = new Tuple2<>(DEFAULT_MEAN_NANOS, 0L); - // mean, count - private Tuple2 meanForTests = DEFAULT_MEAN_TUPLE; - - /** - * Read tests, mean duration and runs from a csv file. - * - * @param reader a reader - * @return list of tests, or an empty list if none or we have a problem. - */ - public static List> read(Reader reader) { - try { - List records = CSVFormat.DEFAULT.withHeader().parse(reader).getRecords(); - return records.stream().map(record -> { - try { - final String testName = record.get(TEST_NAME); - final long testDuration = Long.parseLong(record.get(MEAN_DURATION_NANOS)); - final long testRuns = Long.parseLong(record.get(NUMBER_OF_RUNS)); // can't see how we would have zero tbh. - return new Tuple3<>(testName, testDuration, Math.max(testRuns, 1)); - } catch (IllegalArgumentException | IllegalStateException e) { - return null; - } - }).filter(Objects::nonNull).sorted(Comparator.comparing(Tuple3::getFirst)).collect(Collectors.toList()); - } catch (IOException ignored) { - - } - return Collections.emptyList(); - } - - private static Tuple2 recalculateMean(@NotNull final Tuple2 previous, long nanos) { - final long total = previous.getFirst() * previous.getSecond() + nanos; - final long count = previous.getSecond() + 1; - return new Tuple2<>(total / count, count); - } - - /** - * Write a csv file of test name, duration, runs - * - * @param writer a writer - * @return true if no problems. - */ - public boolean write(@NotNull final Writer writer) { - boolean ok = true; - final CSVPrinter printer; - try { - printer = new CSVPrinter(writer, - CSVFormat.DEFAULT.withHeader(TEST_NAME, MEAN_DURATION_NANOS, NUMBER_OF_RUNS)); - for (String key : tests.keySet()) { - printer.printRecord(key, tests.get(key).getFirst(), tests.get(key).getSecond()); - } - - printer.flush(); - } catch (IOException e) { - ok = false; - } - return ok; - } - - /** - * Add tests, and also (re)calculate the mean test duration. 
- * e.g. addTests(read(reader)); - * - * @param testsCollection tests, typically from a csv file. - */ - public void addTests(@NotNull final List<Tuple3<String, Long, Long>> testsCollection) { - testsCollection.forEach(t -> this.tests.put(t.getFirst(), new Tuple2<>(t.getSecond(), t.getThird()))); - - // Calculate the mean test time. - if (tests.size() > 0) { - long total = 0; - for (String testName : this.tests.keySet()) total += tests.get(testName).getFirst(); - meanForTests = new Tuple2<>(total / this.tests.size(), 1L); - } - } - - /** - * Get the known mean duration of a test. - * - * @param testName the test name - * @return duration in nanos. - */ - public long getDuration(@NotNull final String testName) { - return tests.getOrDefault(testName, meanForTests).getFirst(); - } - - /** - * Add test information. Recalculates the mean test duration if the test already exists. - * - * @param testName name of the test - * @param durationNanos duration - */ - public void addDuration(@NotNull final String testName, long durationNanos) { - final Tuple2<Long, Long> current = tests.getOrDefault(testName, new Tuple2<>(0L, 0L)); - - tests.put(testName, recalculateMean(current, durationNanos)); - - LOG.debug("Recorded test '{}', mean={} ns, runs={}", testName, tests.get(testName).getFirst(), tests.get(testName).getSecond()); - - meanForTests = recalculateMean(meanForTests, durationNanos); - } - - /** - * Do we have any test information? - * - * @return false if no tests info - */ - public boolean isEmpty() { - return tests.isEmpty(); - } - - /** - * How many tests do we have? - * - * @return the number of tests we have information for - */ - public int size() { - return tests.size(); - } - - /** - * Return all tests (and their durations) that begin with (or are equal to) `testPrefix`. - * If not present we just return the mean test duration so that the test is fairly distributed. - * @param testPrefix could be just the classname, or the entire classname + testname. - * @return list of matching tests - */ - @NotNull - List<Tuple2<String, Long>> startsWith(@NotNull final String testPrefix) { - List<Tuple2<String, Long>> results = this.tests.keySet().stream() - .filter(t -> t.startsWith(testPrefix)) - .map(t -> new Tuple2<>(t, getDuration(t))) - .collect(Collectors.toList()); - // We don't know if the testPrefix is a classname or classname.methodname (exact match). - if (results.isEmpty()) { - LOG.warn("In {} previously executed tests, could not find any starting with {}", tests.size(), testPrefix); - results = Arrays.asList(new Tuple2<>(testPrefix, getMeanDurationForTests())); - } - return results; - } - - @NotNull - List<Tuple2<String, Long>> equals(@NotNull final String testPrefix) { - List<Tuple2<String, Long>> results = this.tests.keySet().stream() - .filter(t -> t.equals(testPrefix)) - .map(t -> new Tuple2<>(t, getDuration(t))) - .collect(Collectors.toList()); - // The testPrefix here must be an exact classname.methodname match. - if (results.isEmpty()) { - LOG.warn("In {} previously executed tests, could not find any equal to {}", tests.size(), testPrefix); - results = Arrays.asList(new Tuple2<>(testPrefix, getMeanDurationForTests())); - } - return results; - } - - /** - * How many times has this function been run? Every call to addDuration increments the current value. - * - * @param testName the test name - * @return the number of times the test name has been run.
- */ - public long getRunCount(@NotNull final String testName) { - return tests.getOrDefault(testName, new Tuple2<>(0L, 0L)).getSecond(); - } - - /** - * Return the mean duration for a unit to run - * - * @return mean duration in nanos. - */ - public long getMeanDurationForTests() { - return meanForTests.getFirst(); - } - - /** - * Clear all tests - */ - void clear() { - tests.clear(); - meanForTests = DEFAULT_MEAN_TUPLE; - } -} \ No newline at end of file diff --git a/buildSrc/src/main/java/net/corda/testing/KubePodResult.java b/buildSrc/src/main/java/net/corda/testing/KubePodResult.java deleted file mode 100644 index 76ba668c39..0000000000 --- a/buildSrc/src/main/java/net/corda/testing/KubePodResult.java +++ /dev/null @@ -1,35 +0,0 @@ -package net.corda.testing; - -import java.io.File; -import java.util.Collection; - -public class KubePodResult { - - private final int podIndex; - private final int resultCode; - private final File output; - private final Collection binaryResults; - - public KubePodResult(int podIndex, int resultCode, File output, Collection binaryResults) { - this.podIndex = podIndex; - this.resultCode = resultCode; - this.output = output; - this.binaryResults = binaryResults; - } - - public int getResultCode() { - return resultCode; - } - - public File getOutput() { - return output; - } - - public Collection getBinaryResults() { - return binaryResults; - } - - public int getPodIndex() { - return podIndex; - } -} diff --git a/buildSrc/src/main/java/net/corda/testing/KubesReporting.java b/buildSrc/src/main/java/net/corda/testing/KubesReporting.java deleted file mode 100644 index 4f8f8aee0c..0000000000 --- a/buildSrc/src/main/java/net/corda/testing/KubesReporting.java +++ /dev/null @@ -1,194 +0,0 @@ -/* - * Copyright 2012 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package net.corda.testing; - -import org.apache.commons.compress.utils.IOUtils; -import org.gradle.api.DefaultTask; -import org.gradle.api.GradleException; -import org.gradle.api.Transformer; -import org.gradle.api.file.FileCollection; -import org.gradle.api.internal.file.UnionFileCollection; -import org.gradle.api.internal.tasks.testing.junit.result.AggregateTestResultsProvider; -import org.gradle.api.internal.tasks.testing.junit.result.BinaryResultBackedTestResultsProvider; -import org.gradle.api.internal.tasks.testing.junit.result.TestResultsProvider; -import org.gradle.api.internal.tasks.testing.report.DefaultTestReport; -import org.gradle.api.tasks.OutputDirectory; -import org.gradle.api.tasks.TaskAction; -import org.gradle.api.tasks.testing.Test; -import org.gradle.internal.logging.ConsoleRenderer; -import org.gradle.internal.operations.BuildOperationExecutor; - -import javax.inject.Inject; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.util.ArrayList; -import java.util.LinkedList; -import java.util.List; -import java.util.stream.Collectors; - -import static org.gradle.internal.concurrent.CompositeStoppable.stoppable; -import static org.gradle.util.CollectionUtils.collect; - -/** - * Shameful copy of org.gradle.api.tasks.testing.TestReport - modified to handle results from k8s testing. - * see https://docs.gradle.org/current/dsl/org.gradle.api.tasks.testing.TestReport.html - */ -public class KubesReporting extends DefaultTask { - private File destinationDir = new File(getProject().getBuildDir(), "test-reporting"); - private List results = new ArrayList(); - List podResults = new ArrayList<>(); - boolean shouldPrintOutput = true; - - public KubesReporting() { - //force this task to always run, as it's responsible for parsing exit codes - getOutputs().upToDateWhen(t -> false); - } - - @Inject - protected BuildOperationExecutor getBuildOperationExecutor() { - throw new UnsupportedOperationException(); - } - - /** - * Returns the directory to write the HTML report to. - */ - @OutputDirectory - public File getDestinationDir() { - return destinationDir; - } - - /** - * Sets the directory to write the HTML report to. - */ - public void setDestinationDir(File destinationDir) { - this.destinationDir = destinationDir; - } - - /** - * Returns the set of binary test results to include in the report. - */ - public FileCollection getTestResultDirs() { - UnionFileCollection dirs = new UnionFileCollection(); - for (Object result : results) { - addTo(result, dirs); - } - return dirs; - } - - private void addTo(Object result, UnionFileCollection dirs) { - if (result instanceof Test) { - Test test = (Test) result; - dirs.addToUnion(getProject().files(test.getBinResultsDir()).builtBy(test)); - } else if (result instanceof Iterable) { - Iterable iterable = (Iterable) result; - for (Object nested : iterable) { - addTo(nested, dirs); - } - } else { - dirs.addToUnion(getProject().files(result)); - } - } - - /** - * Sets the binary test results to use to include in the report. Each entry must point to a binary test results directory generated by a {@link Test} - * task. - */ - public void setTestResultDirs(Iterable testResultDirs) { - this.results.clear(); - reportOn(testResultDirs); - } - - /** - * Adds some results to include in the report. - * - *

<p>This method accepts any parameter of the given types: - * - *
<ul> - * - *
<li>A {@link Test} task instance. The results from the test task are included in the report. The test task is automatically added - * as a dependency of this task.</li> - * - *
<li>Anything that can be converted to a set of {@link File} instances as per {@link org.gradle.api.Project#files(Object...)}. These must - * point to the binary test results directory generated by a {@link Test} task instance.</li> - * - *
<li>An {@link Iterable}. The contents of the iterable are converted recursively.</li> - * - *
</ul>
- * - * @param results The result objects. - */ - public void reportOn(Object... results) { - for (Object result : results) { - this.results.add(result); - } - } - - @TaskAction - void generateReport() { - TestResultsProvider resultsProvider = createAggregateProvider(); - try { - if (resultsProvider.isHasResults()) { - DefaultTestReport testReport = new DefaultTestReport(getBuildOperationExecutor()); - testReport.generateReport(resultsProvider, getDestinationDir()); - List containersWithNonZeroReturnCodes = podResults.stream() - .filter(result -> result.getResultCode() != 0) - .collect(Collectors.toList()); - - if (!containersWithNonZeroReturnCodes.isEmpty()) { - String reportUrl = new ConsoleRenderer().asClickableFileUrl(new File(destinationDir, "index.html")); - if (shouldPrintOutput) { - containersWithNonZeroReturnCodes.forEach(podResult -> { - try { - System.out.println("\n##### CONTAINER " + podResult.getPodIndex() + " OUTPUT START #####"); - IOUtils.copy(new FileInputStream(podResult.getOutput()), System.out); - System.out.println("##### CONTAINER " + podResult.getPodIndex() + " OUTPUT END #####\n"); - } catch (IOException ignored) { - } - }); - } - String message = "remote build failed, check test report at " + reportUrl; - throw new GradleException(message); - } - } else { - getLogger().info("{} - no binary test results found in dirs: {}.", getPath(), getTestResultDirs().getFiles()); - setDidWork(false); - } - } finally { - stoppable(resultsProvider).stop(); - } - } - - public TestResultsProvider createAggregateProvider() { - List resultsProviders = new LinkedList(); - try { - FileCollection resultDirs = getTestResultDirs(); - if (resultDirs.getFiles().size() == 1) { - return new BinaryResultBackedTestResultsProvider(resultDirs.getSingleFile()); - } else { - return new AggregateTestResultsProvider(collect(resultDirs, resultsProviders, new Transformer() { - public TestResultsProvider transform(File dir) { - return new BinaryResultBackedTestResultsProvider(dir); - } - })); - } - } catch (RuntimeException e) { - stoppable(resultsProviders).stop(); - throw e; - } - } -} diff --git a/buildSrc/src/main/java/net/corda/testing/retry/Retry.java b/buildSrc/src/main/java/net/corda/testing/retry/Retry.java deleted file mode 100644 index 9b919963c5..0000000000 --- a/buildSrc/src/main/java/net/corda/testing/retry/Retry.java +++ /dev/null @@ -1,48 +0,0 @@ -package net.corda.testing.retry; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.concurrent.Callable; - -public final class Retry { - private static final Logger log = LoggerFactory.getLogger(Retry.class); - - public interface RetryStrategy { - T run(Callable op) throws RetryException; - } - - public static final class RetryException extends RuntimeException { - public RetryException(String message) { - super(message); - } - - public RetryException(String message, Throwable cause) { - super(message, cause); - } - } - - public static RetryStrategy fixed(int times) { - if (times < 1) throw new IllegalArgumentException(); - return new RetryStrategy() { - @Override - public T run(Callable op) { - int run = 0; - Exception last = null; - while (run < times) { - try { - return op.call(); - } catch (Exception e) { - last = e; - log.info("Exception caught: " + e.getMessage()); - } - run++; - } - throw new RetryException("Operation failed " + run + " times", last); - } - }; - } -} - - - diff --git a/buildSrc/src/test/groovy/net/corda/testing/ListTestsTest.java 
b/buildSrc/src/test/groovy/net/corda/testing/ListTestsTest.java deleted file mode 100644 index c45745d5dd..0000000000 --- a/buildSrc/src/test/groovy/net/corda/testing/ListTestsTest.java +++ /dev/null @@ -1,43 +0,0 @@ -package net.corda.testing; - -import org.hamcrest.CoreMatchers; -import org.junit.Assert; -import org.junit.Test; - -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.stream.Collectors; -import java.util.stream.IntStream; - -import static org.hamcrest.core.Is.is; -import static org.hamcrest.core.IsEqual.equalTo; - -public class ListTestsTest { - - @Test - public void shouldAllocateTests() { - - for (int numberOfTests = 0; numberOfTests < 100; numberOfTests++) { - for (int numberOfForks = 1; numberOfForks < 100; numberOfForks++) { - - - List tests = IntStream.range(0, numberOfTests).boxed() - .map(integer -> "Test.method" + integer.toString()) - .collect(Collectors.toList()); - ListShufflerAndAllocator testLister = new ListShufflerAndAllocator(tests); - - List listOfLists = new ArrayList<>(); - for (int fork = 0; fork < numberOfForks; fork++) { - listOfLists.addAll(testLister.getTestsForFork(fork, numberOfForks, 0)); - } - - Assert.assertThat(listOfLists.size(), CoreMatchers.is(tests.size())); - Assert.assertThat(new HashSet<>(listOfLists).size(), CoreMatchers.is(tests.size())); - Assert.assertThat(listOfLists.stream().sorted().collect(Collectors.toList()), is(equalTo(tests.stream().sorted().collect(Collectors.toList())))); - } - } - - } - -} diff --git a/buildSrc/src/test/groovy/net/corda/testing/PropertiesTest.java b/buildSrc/src/test/groovy/net/corda/testing/PropertiesTest.java deleted file mode 100644 index 599c5f8d4a..0000000000 --- a/buildSrc/src/test/groovy/net/corda/testing/PropertiesTest.java +++ /dev/null @@ -1,63 +0,0 @@ -package net.corda.testing; - -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -public class PropertiesTest { - private static String username = "me"; - private static String password = "me"; - private static String cordaType = "corda-project"; - private static String branch = "mine"; - private static String targetBranch = "master"; - - @Before - public void setUp() { - System.setProperty("git.branch", branch); - System.setProperty("git.target.branch", targetBranch); - System.setProperty("artifactory.username", username); - System.setProperty("artifactory.password", password); - } - - @After - public void tearDown() { - System.setProperty("git.branch", ""); - System.setProperty("git.target.branch", ""); - System.setProperty("artifactory.username", ""); - System.setProperty("artifactory.password", ""); - } - - @Test - public void cordaType() { - Properties.setRootProjectType(cordaType); - Assert.assertEquals(cordaType, Properties.getRootProjectType()); - } - - @Test - public void getUsername() { - Assert.assertEquals(username, Properties.getUsername()); - } - - @Test - public void getPassword() { - Assert.assertEquals(password, Properties.getPassword()); - } - - @Test - public void getGitBranch() { - Assert.assertEquals(branch, Properties.getGitBranch()); - } - - @Test - public void getTargetGitBranch() { - Assert.assertEquals(targetBranch, Properties.getTargetGitBranch()); - } - - @Test - public void getPublishJunitTests() { - Assert.assertFalse(Properties.getPublishJunitTests()); - System.setProperty("publish.junit", "true"); - Assert.assertTrue(Properties.getPublishJunitTests()); - } -} \ No newline at end of file diff --git 
diff --git a/buildSrc/src/test/groovy/net/corda/testing/PropertiesTest.java b/buildSrc/src/test/groovy/net/corda/testing/PropertiesTest.java
deleted file mode 100644
index 599c5f8d4a..0000000000
--- a/buildSrc/src/test/groovy/net/corda/testing/PropertiesTest.java
+++ /dev/null
@@ -1,63 +0,0 @@
-package net.corda.testing;
-
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-public class PropertiesTest {
-    private static String username = "me";
-    private static String password = "me";
-    private static String cordaType = "corda-project";
-    private static String branch = "mine";
-    private static String targetBranch = "master";
-
-    @Before
-    public void setUp() {
-        System.setProperty("git.branch", branch);
-        System.setProperty("git.target.branch", targetBranch);
-        System.setProperty("artifactory.username", username);
-        System.setProperty("artifactory.password", password);
-    }
-
-    @After
-    public void tearDown() {
-        System.setProperty("git.branch", "");
-        System.setProperty("git.target.branch", "");
-        System.setProperty("artifactory.username", "");
-        System.setProperty("artifactory.password", "");
-    }
-
-    @Test
-    public void cordaType() {
-        Properties.setRootProjectType(cordaType);
-        Assert.assertEquals(cordaType, Properties.getRootProjectType());
-    }
-
-    @Test
-    public void getUsername() {
-        Assert.assertEquals(username, Properties.getUsername());
-    }
-
-    @Test
-    public void getPassword() {
-        Assert.assertEquals(password, Properties.getPassword());
-    }
-
-    @Test
-    public void getGitBranch() {
-        Assert.assertEquals(branch, Properties.getGitBranch());
-    }
-
-    @Test
-    public void getTargetGitBranch() {
-        Assert.assertEquals(targetBranch, Properties.getTargetGitBranch());
-    }
-
-    @Test
-    public void getPublishJunitTests() {
-        Assert.assertFalse(Properties.getPublishJunitTests());
-        System.setProperty("publish.junit", "true");
-        Assert.assertTrue(Properties.getPublishJunitTests());
-    }
-}
\ No newline at end of file
diff --git a/buildSrc/src/test/groovy/net/corda/testing/TestDurationArtifactsTest.java b/buildSrc/src/test/groovy/net/corda/testing/TestDurationArtifactsTest.java
deleted file mode 100644
index c2a079771d..0000000000
--- a/buildSrc/src/test/groovy/net/corda/testing/TestDurationArtifactsTest.java
+++ /dev/null
@@ -1,323 +0,0 @@
-package net.corda.testing;
-
-import groovy.lang.Tuple2;
-import org.apache.commons.compress.archivers.ArchiveException;
-import org.apache.commons.compress.archivers.ArchiveOutputStream;
-import org.apache.commons.compress.archivers.ArchiveStreamFactory;
-import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
-import org.jetbrains.annotations.NotNull;
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.StringWriter;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.stream.Collectors;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipOutputStream;
-
-public class TestDurationArtifactsTest {
-    final static String CLASSNAME = "FAKE";
-
-    String getXml(List<Tuple2<String, Long>> tests) {
-        StringBuilder sb = new StringBuilder();
-        sb.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
-                "<testsuite name=\"" + CLASSNAME + "\" tests=\"" + tests.size() + "\">\n");
-
-        for (Tuple2<String, Long> test : tests) {
-            Double d = ((double) test.getSecond()) / 1_000_000_000.0;
-            sb.append("    <testcase name=\"" + test.getFirst() + "\" classname=\"" + CLASSNAME + "\" time=\"" + d + "\"/>\n");
-        }
-
-        sb.append("</testsuite>\n");
-        return sb.toString();
-    }
-
-    String getXmlWithNoTime(List<Tuple2<String, Long>> tests) {
-        StringBuilder sb = new StringBuilder();
-        sb.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
-                "<testsuite name=\"" + CLASSNAME + "\" tests=\"" + tests.size() + "\">\n");
-
-        for (Tuple2<String, Long> test : tests) {
-            Double d = ((double) test.getSecond()) / 1_000_000_000.0;
-            sb.append("    <testcase name=\"" + test.getFirst() + "\" classname=\"" + CLASSNAME + "\"/>\n");
-        }
-
-        sb.append("</testsuite>\n");
-        return sb.toString();
-    }
-
-    @Test
-    public void fromJunitXml() {
-        List<Tuple2<String, Long>> tests = new ArrayList<>();
-        tests.add(new Tuple2<>("TEST-A", 111_000_000_000L));
-        tests.add(new Tuple2<>("TEST-B", 222_200_000_000L));
-        final String xml = getXml(tests);
-
-        List<Tuple2<String, Long>> results
-                = TestDurationArtifacts.fromJunitXml(new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8)));
-
-        Assert.assertNotNull(results);
-
-        Assert.assertFalse("Should have results", results.isEmpty());
-        Assert.assertEquals(results.size(), 2);
-        Assert.assertEquals(CLASSNAME + "." + "TEST-A", results.get(0).getFirst());
-        Assert.assertEquals(111_000_000_000L, results.get(0).getSecond().longValue());
-        Assert.assertEquals(CLASSNAME + "." + "TEST-B", results.get(1).getFirst());
-        Assert.assertEquals(222_200_000_000L, results.get(1).getSecond().longValue());
-    }
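The assertions above imply what fromJunitXml must do: join classname and test name with a dot, and convert the time attribute (seconds) to nanoseconds, treating a missing attribute as zero. A hedged sketch of that parse using the JDK's DOM API (this is not the deleted implementation; the class and method names are invented):

import org.w3c.dom.*;
import javax.xml.parsers.DocumentBuilderFactory;
import java.io.InputStream;
import java.util.*;

class JunitXmlTimes {
    static List<Map.Entry<String, Long>> parse(InputStream in) throws Exception {
        Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(in);
        NodeList cases = doc.getElementsByTagName("testcase");
        List<Map.Entry<String, Long>> out = new ArrayList<>();
        for (int i = 0; i < cases.getLength(); i++) {
            Element e = (Element) cases.item(i);
            String name = e.getAttribute("classname") + "." + e.getAttribute("name");
            String time = e.getAttribute("time"); // empty string when the attribute is absent
            long nanos = time.isEmpty() ? 0L : (long) (Double.parseDouble(time) * 1_000_000_000L);
            out.add(new AbstractMap.SimpleEntry<>(name, nanos));
        }
        return out;
    }
}
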
+ "TEST-A", results.get(0).getFirst()); - Assert.assertEquals(0L, results.get(0).getSecond().longValue()); - Assert.assertEquals(CLASSNAME + "." + "TEST-B", results.get(1).getFirst()); - Assert.assertEquals(0L, results.get(1).getSecond().longValue()); - } - - @Test - public void fromJunitXmlWithNoDuration() { - // We do return zero values. - List> tests = new ArrayList<>(); - tests.add(new Tuple2<>("TEST-A", 0L)); - tests.add(new Tuple2<>("TEST-B", 0L)); - final String xml = getXmlWithNoTime(tests); - - List> results - = TestDurationArtifacts.fromJunitXml(new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8))); - - Assert.assertNotNull(results); - - Assert.assertFalse("Should have results", results.isEmpty()); - Assert.assertEquals(2, results.size()); - Assert.assertEquals(CLASSNAME + "." + "TEST-A", results.get(0).getFirst()); - Assert.assertEquals(0L, results.get(0).getSecond().longValue()); - Assert.assertEquals(CLASSNAME + "." + "TEST-B", results.get(1).getFirst()); - Assert.assertEquals(0L, results.get(1).getSecond().longValue()); - } - - @Test - public void canCreateZipFile() throws IOException { - Tests outputTests = new Tests(); - final String testA = "com.corda.testA"; - final String testB = "com.corda.testB"; - outputTests.addDuration(testA, 55L); - outputTests.addDuration(testB, 33L); - - StringWriter writer = new StringWriter(); - outputTests.write(writer); - String csv = writer.toString(); - - ByteArrayOutputStream byteStream = new ByteArrayOutputStream(); - try (ZipOutputStream outputStream = new ZipOutputStream(byteStream, StandardCharsets.UTF_8)) { - ZipEntry entry = new ZipEntry("tests.csv"); - outputStream.putNextEntry(entry); - outputStream.write(csv.getBytes(StandardCharsets.UTF_8)); - outputStream.closeEntry(); - } - Assert.assertNotEquals(0, byteStream.toByteArray().length); - - ByteArrayInputStream inputStream = new ByteArrayInputStream(byteStream.toByteArray()); - Tests tests = new Tests(); - Assert.assertTrue(tests.isEmpty()); - - TestDurationArtifacts.addTestsFromZippedCsv(tests, inputStream); - - Assert.assertFalse(tests.isEmpty()); - Assert.assertEquals(2, tests.size()); - Assert.assertEquals(55L, tests.getDuration(testA)); - Assert.assertEquals(33L, tests.getDuration(testB)); - - Assert.assertEquals(44L, tests.getMeanDurationForTests()); - } - - void putIntoArchive(@NotNull final ArchiveOutputStream outputStream, - @NotNull final String fileName, - @NotNull final String content) throws IOException { - ZipArchiveEntry entry = new ZipArchiveEntry(fileName); - outputStream.putArchiveEntry(entry); - outputStream.write(content.getBytes(StandardCharsets.UTF_8)); - outputStream.closeArchiveEntry(); - } - - String write(@NotNull final Tests tests) { - - StringWriter writer = new StringWriter(); - tests.write(writer); - return writer.toString(); - } - - @Test - public void canCreateZipFileContainingMultipleFiles() throws IOException, ArchiveException { - // Currently we don't have two csvs in the zip file, but test anyway. 
-    @Test
-    public void canCreateZipFileContainingMultipleFiles() throws IOException, ArchiveException {
-        // Currently we don't have two csvs in the zip file, but test anyway.
-        Tests outputTests = new Tests();
-        final String testA = "com.corda.testA";
-        final String testB = "com.corda.testB";
-        final String testC = "com.corda.testC";
-        outputTests.addDuration(testA, 55L);
-        outputTests.addDuration(testB, 33L);
-
-        String csv = write(outputTests);
-
-        Tests otherTests = new Tests();
-        otherTests.addDuration(testA, 55L);
-        otherTests.addDuration(testB, 33L);
-        otherTests.addDuration(testC, 22L);
-        String otherCsv = write(otherTests);
-
-        ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
-        try (ArchiveOutputStream outputStream =
-                     new ArchiveStreamFactory("UTF-8").createArchiveOutputStream(ArchiveStreamFactory.ZIP, byteStream)) {
-            putIntoArchive(outputStream, "tests1.csv", csv);
-            putIntoArchive(outputStream, "tests2.csv", otherCsv);
-            outputStream.flush();
-        }
-
-        Assert.assertNotEquals(0, byteStream.toByteArray().length);
-        ByteArrayInputStream inputStream = new ByteArrayInputStream(byteStream.toByteArray());
-
-        Tests tests = new Tests();
-        Assert.assertTrue(tests.isEmpty());
-
-        TestDurationArtifacts.addTestsFromZippedCsv(tests, inputStream);
-
-        Assert.assertFalse(tests.isEmpty());
-        Assert.assertEquals(3, tests.size());
-        Assert.assertEquals((55 + 33 + 22) / 3, tests.getMeanDurationForTests());
-    }
-
-//    // Uncomment to test a file.
-//    // Run a build to generate some test files, create a zip:
-//    //   zip ~/tests.zip $(find . -name "*.xml" -type f | grep test-results)
-//    @Test
-//    public void testZipFile() throws FileNotFoundException {
-//        File f = new File(System.getProperty("tests.zip", "/tests.zip"));
-//        List<Tuple2<String, Long>> results = BucketingAllocatorTask.fromZippedXml(new BufferedInputStream(new FileInputStream(f)));
-//        Assert.assertFalse("Should have results", results.isEmpty());
-//        System.out.println("Results = " + results.size());
-//        System.out.println(results.toString());
-//    }
-
-    @Test
-    public void branchNamesDoNotHaveDirectoryDelimiters() {
-        // We use the branch name in file and artifact tagging, so '/' would confuse things;
-        // make sure that when we retrieve the property we strip them out.
-        final String expected = "release/os/4.3";
-        final String key = "git.branch";
-        final String cordaType = "corda";
-        Properties.setRootProjectType(cordaType);
-        System.setProperty(key, expected);
-
-        Assert.assertEquals(expected, System.getProperty(key));
-        Assert.assertNotEquals(expected, Properties.getGitBranch());
-        Assert.assertEquals("release-os-4.3", Properties.getGitBranch());
-    }
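The expected value pins down the sanitisation rule: path separators in the branch name become hyphens before the name is used in file and artifact tags. The one-liner this implies (a sketch, not the deleted Properties code):

class BranchNames {
    static String forArtifacts(String gitBranch) {
        return gitBranch.replace('/', '-'); // "release/os/4.3" -> "release-os-4.3"
    }
}
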
-    @Test
-    public void getTestsFromArtifactory() {
-        String artifactory_password = System.getenv("ARTIFACTORY_PASSWORD");
-        String artifactory_username = System.getenv("ARTIFACTORY_USERNAME");
-        String git_branch = System.getenv("CORDA_GIT_BRANCH");
-        String git_target_branch = System.getenv("CORDA_GIT_TARGET_BRANCH");
-
-        if (artifactory_password == null ||
-                artifactory_username == null ||
-                git_branch == null ||
-                git_target_branch == null) {
-            System.out.println("Skipping test - set env vars to run this test");
-            return;
-        }
-
-        System.setProperty("git.branch", git_branch);
-        System.setProperty("git.target.branch", git_target_branch);
-        System.setProperty("artifactory.password", artifactory_password);
-        System.setProperty("artifactory.username", artifactory_username);
-        Assert.assertTrue(TestDurationArtifacts.tests.isEmpty());
-        TestDurationArtifacts.loadTests();
-        Assert.assertFalse(TestDurationArtifacts.tests.isEmpty());
-    }
-
-    @Test
-    public void tryAndWalkForTestXmlFiles() {
-        final String xmlRoot = System.getenv("JUNIT_XML_ROOT");
-        if (xmlRoot == null) {
-            System.out.println("Set JUNIT_XML_ROOT to run this test");
-            return;
-        }
-
-        List<Path> testXmlFiles = TestDurationArtifacts.getTestXmlFiles(Paths.get(xmlRoot));
-        Assert.assertFalse(testXmlFiles.isEmpty());
-
-        for (Path testXmlFile : testXmlFiles.stream().sorted().collect(Collectors.toList())) {
-            // System.out.println(testXmlFile.toString());
-        }
-
-        System.out.println("\n\nTESTS\n\n");
-        for (Path testResult : testXmlFiles) {
-            try {
-                final List<Tuple2<String, Long>> unitTests = TestDurationArtifacts.fromJunitXml(new FileInputStream(testResult.toFile()));
-                for (Tuple2<String, Long> unitTest : unitTests) {
-                    System.out.println(unitTest.getFirst() + " --> " + BucketingAllocator.getDuration(unitTest.getSecond()));
-                }
-            } catch (FileNotFoundException e) {
-                e.printStackTrace();
-            }
-        }
-    }
-}
diff --git a/buildSrc/src/test/groovy/net/corda/testing/TestsTest.java b/buildSrc/src/test/groovy/net/corda/testing/TestsTest.java
deleted file mode 100644
index 9fdce14292..0000000000
--- a/buildSrc/src/test/groovy/net/corda/testing/TestsTest.java
+++ /dev/null
@@ -1,145 +0,0 @@
-package net.corda.testing;
-
-import org.junit.Assert;
-import org.junit.Test;
-
-import java.io.StringReader;
-import java.io.StringWriter;
-
-public class TestsTest {
-    @Test
-    public void read() {
-        final Tests tests = new Tests();
-        Assert.assertTrue(tests.isEmpty());
-
-        final String s = Tests.TEST_NAME + "," + Tests.MEAN_DURATION_NANOS + "," + Tests.NUMBER_OF_RUNS + '\n'
-                + "hello,100,4\n";
-        tests.addTests(Tests.read(new StringReader(s)));
-
-        Assert.assertFalse(tests.isEmpty());
-        Assert.assertEquals((long) tests.getDuration("hello"), 100);
-    }
-
-    @Test
-    public void write() {
-        final StringWriter writer = new StringWriter();
-        final Tests tests = new Tests();
-        Assert.assertTrue(tests.isEmpty());
-        tests.addDuration("hello", 100);
-        tests.write(writer);
-        Assert.assertFalse(tests.isEmpty());
-
-        final StringReader reader = new StringReader(writer.toString());
-        final Tests otherTests = new Tests();
-        otherTests.addTests(Tests.read(reader));
-
-        Assert.assertFalse(tests.isEmpty());
-        Assert.assertFalse(otherTests.isEmpty());
-        Assert.assertEquals(tests.size(), otherTests.size());
-        Assert.assertEquals(tests.getDuration("hello"), otherTests.getDuration("hello"));
-    }
-
-    @Test
-    public void addingTestChangesMeanDuration() {
-        final Tests tests = new Tests();
-        final String s = Tests.TEST_NAME + "," + Tests.MEAN_DURATION_NANOS + "," + Tests.NUMBER_OF_RUNS + '\n'
-                + "hello,100,4\n";
-        tests.addTests(Tests.read(new StringReader(s)));
-
-        Assert.assertFalse(tests.isEmpty());
-        // 400 total for 4 tests
-        Assert.assertEquals((long) tests.getDuration("hello"), 100);
-
-        // 1000 total for 5 tests = 200 mean
-        tests.addDuration("hello", 600);
-        Assert.assertEquals((long) tests.getDuration("hello"), 200);
-    }
-
-    @Test
-    public void addTests() {
-        final Tests tests = new Tests();
-        Assert.assertTrue(tests.isEmpty());
-
-        final String s = Tests.TEST_NAME + "," + Tests.MEAN_DURATION_NANOS + "," + Tests.NUMBER_OF_RUNS + '\n'
-                + "hello,100,4\n"
-                + "goodbye,200,4\n";
-
-        tests.addTests(Tests.read(new StringReader(s)));
-        Assert.assertFalse(tests.isEmpty());
-        Assert.assertEquals(tests.size(), 2);
-    }
-
-    @Test
-    public void getDuration() {
-        final Tests tests = new Tests();
-        Assert.assertTrue(tests.isEmpty());
-
-        final String s = Tests.TEST_NAME + "," + Tests.MEAN_DURATION_NANOS + "," + Tests.NUMBER_OF_RUNS + '\n'
-                + "hello,100,4\n"
-                + "goodbye,200,4\n";
-
-        tests.addTests(Tests.read(new StringReader(s)));
-        Assert.assertFalse(tests.isEmpty());
-        Assert.assertEquals(tests.size(), 2);
-
-        Assert.assertEquals(100L, tests.getDuration("hello"));
-        Assert.assertEquals(200L, tests.getDuration("goodbye"));
-    }
-
-    @Test
-    public void addTestInfo() {
-        final Tests tests = new Tests();
-        Assert.assertTrue(tests.isEmpty());
-
-        final String s = Tests.TEST_NAME + "," + Tests.MEAN_DURATION_NANOS + "," + Tests.NUMBER_OF_RUNS + '\n'
-                + "hello,100,4\n"
-                + "goodbye,200,4\n";
-
-        tests.addTests(Tests.read(new StringReader(s)));
-        Assert.assertFalse(tests.isEmpty());
-        Assert.assertEquals(2, tests.size());
-
-        tests.addDuration("foo", 55);
-        tests.addDuration("bar", 33);
-        Assert.assertEquals(4, tests.size());
-
-        tests.addDuration("bar", 56);
-        Assert.assertEquals(4, tests.size());
-    }
-
-    @Test
-    public void addingNewDurationUpdatesRunCount() {
-        final Tests tests = new Tests();
-        Assert.assertTrue(tests.isEmpty());
-
-        final String s = Tests.TEST_NAME + "," + Tests.MEAN_DURATION_NANOS + "," + Tests.NUMBER_OF_RUNS + '\n'
-                + "hello,100,4\n"
-                + "goodbye,200,4\n";
-
-        tests.addTests(Tests.read(new StringReader(s)));
-        Assert.assertFalse(tests.isEmpty());
-        Assert.assertEquals(2, tests.size());
-
-        tests.addDuration("foo", 55);
-
-        Assert.assertEquals(0, tests.getRunCount("bar"));
-
-        tests.addDuration("bar", 33);
-        Assert.assertEquals(4, tests.size());
-
-        tests.addDuration("bar", 56);
-        Assert.assertEquals(2, tests.getRunCount("bar"));
-        Assert.assertEquals(4, tests.size());
-
-        tests.addDuration("bar", 56);
-        tests.addDuration("bar", 56);
-        Assert.assertEquals(4, tests.getRunCount("bar"));
-
-        Assert.assertEquals(4, tests.getRunCount("hello"));
-        tests.addDuration("hello", 22);
-        tests.addDuration("hello", 22);
-        tests.addDuration("hello", 22);
-        Assert.assertEquals(7, tests.getRunCount("hello"));
-    }
-}
\ No newline at end of file
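The mean arithmetic the read and addingTestChangesMeanDuration tests encode is a weighted running average: 4 runs at mean 100 plus one 600 ns run gives (4 × 100 + 600) / 5 = 200. As a minimal sketch (a restatement of the rule the tests assert, not the deleted Tests class):

class RunningMean {
    long meanNanos;
    long runs;

    void addDuration(long nanos) {
        // Fold the new sample into the mean, weighted by the runs seen so far.
        meanNanos = (meanNanos * runs + nanos) / (runs + 1);
        runs += 1;
    }
}
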
diff --git a/buildSrc/src/test/java/net/corda/testing/BucketingAllocatorTest.java b/buildSrc/src/test/java/net/corda/testing/BucketingAllocatorTest.java
deleted file mode 100644
index a42d023a15..0000000000
--- a/buildSrc/src/test/java/net/corda/testing/BucketingAllocatorTest.java
+++ /dev/null
@@ -1,178 +0,0 @@
-package net.corda.testing;
-
-import org.hamcrest.collection.IsIterableContainingInAnyOrder;
-import org.junit.Assert;
-import org.junit.Ignore;
-import org.junit.Test;
-
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-
-import static org.hamcrest.CoreMatchers.is;
-
-public class BucketingAllocatorTest {
-
-    @Test
-    public void shouldAlwaysBucketTestsEvenIfNotInTimedFile() {
-        Tests tests = new Tests();
-        BucketingAllocator bucketingAllocator = new BucketingAllocator(1, () -> tests);
-
-        Object task = new Object();
-        bucketingAllocator.addSource(() -> Arrays.asList("SomeTestingClass", "AnotherTestingClass"), task);
-
-        bucketingAllocator.generateTestPlan();
-        List<String> testsForForkAndTestTask = bucketingAllocator.getTestsForForkAndTestTask(0, task);
-
-        Assert.assertThat(testsForForkAndTestTask, IsIterableContainingInAnyOrder.containsInAnyOrder("SomeTestingClass", "AnotherTestingClass"));
-
-        List<BucketingAllocator.TestsForForkContainer> forkContainers = bucketingAllocator.getForkContainers();
-        Assert.assertEquals(1, forkContainers.size());
-        // There aren't any known tests, so it will use the default instead.
-        Assert.assertEquals(Tests.DEFAULT_MEAN_NANOS, tests.getMeanDurationForTests());
-        Assert.assertEquals(2 * tests.getMeanDurationForTests(), forkContainers.get(0).getCurrentDuration().longValue());
-    }
-
-    @Test
-    public void shouldAlwaysBucketTestsEvenIfNotInTimedFileAndUseMeanValue() {
-        final Tests tests = new Tests();
-        tests.addDuration("someRandomTestNameToForceMeanValue", 1_000_000_000);
-
-        BucketingAllocator bucketingAllocator = new BucketingAllocator(1, () -> tests);
-
-        Object task = new Object();
-        List<String> testNames = Arrays.asList("SomeTestingClass", "AnotherTestingClass");
-
-        bucketingAllocator.addSource(() -> testNames, task);
-
-        bucketingAllocator.generateTestPlan();
-        List<String> testsForForkAndTestTask = bucketingAllocator.getTestsForForkAndTestTask(0, task);
-
-        Assert.assertThat(testsForForkAndTestTask, IsIterableContainingInAnyOrder.containsInAnyOrder(testNames.toArray()));
-
-        List<BucketingAllocator.TestsForForkContainer> forkContainers = bucketingAllocator.getForkContainers();
-        Assert.assertEquals(1, forkContainers.size());
-        Assert.assertEquals(testNames.size() * tests.getMeanDurationForTests(), forkContainers.get(0).getCurrentDuration().longValue());
-    }
-
-    @Test
-    public void shouldAllocateTestsAcrossForksEvenIfNoMatchingTestsFound() {
-        Tests tests = new Tests();
-        tests.addDuration("SomeTestingClass", 1_000_000_000);
-        tests.addDuration("AnotherTestingClass", 2222);
-        BucketingAllocator bucketingAllocator = new BucketingAllocator(2, () -> tests);
-
-        Object task = new Object();
-        bucketingAllocator.addSource(() -> Arrays.asList("SomeTestingClass", "AnotherTestingClass"), task);
-
-        bucketingAllocator.generateTestPlan();
-        List<String> testsForForkOneAndTestTask = bucketingAllocator.getTestsForForkAndTestTask(0, task);
-        List<String> testsForForkTwoAndTestTask = bucketingAllocator.getTestsForForkAndTestTask(1, task);
-
-        Assert.assertThat(testsForForkOneAndTestTask.size(), is(1));
-        Assert.assertThat(testsForForkTwoAndTestTask.size(), is(1));
-
-        List<String> allTests = Stream.of(testsForForkOneAndTestTask, testsForForkTwoAndTestTask).flatMap(Collection::stream).collect(Collectors.toList());
-
-        Assert.assertThat(allTests, IsIterableContainingInAnyOrder.containsInAnyOrder("SomeTestingClass", "AnotherTestingClass"));
-    }
-
-    @Test
-    public void shouldAllocateTestsAcrossForksEvenIfNoMatchingTestsFoundAndUseExistingValues() {
-        Tests tests = new Tests();
-        tests.addDuration("SomeTestingClass", 1_000_000_000L);
-        tests.addDuration("AnotherTestingClass", 3_000_000_000L);
-        BucketingAllocator bucketingAllocator = new BucketingAllocator(2, () -> tests);
-
-        Object task = new Object();
-        bucketingAllocator.addSource(() -> Arrays.asList("YetAnotherTestingClass", "SomeTestingClass", "AnotherTestingClass"), task);
-
-        bucketingAllocator.generateTestPlan();
-        List<String> testsForForkOneAndTestTask = bucketingAllocator.getTestsForForkAndTestTask(0, task);
-        List<String> testsForForkTwoAndTestTask = bucketingAllocator.getTestsForForkAndTestTask(1, task);
-
-        Assert.assertThat(testsForForkOneAndTestTask.size(), is(1));
-        Assert.assertThat(testsForForkTwoAndTestTask.size(), is(2));
-
-        List<String> allTests = Stream.of(testsForForkOneAndTestTask, testsForForkTwoAndTestTask).flatMap(Collection::stream).collect(Collectors.toList());
-
-        Assert.assertThat(allTests, IsIterableContainingInAnyOrder.containsInAnyOrder("YetAnotherTestingClass", "SomeTestingClass", "AnotherTestingClass"));
-
-        List<BucketingAllocator.TestsForForkContainer> forkContainers = bucketingAllocator.getForkContainers();
-        Assert.assertEquals(2, forkContainers.size());
-        // Internally, we should have sorted the tests by decreasing size, so the largest would be added to the first bucket.
-        Assert.assertEquals(TimeUnit.SECONDS.toNanos(3), forkContainers.get(0).getCurrentDuration().longValue());
-
-        // At this point, the second bucket is empty. We also know that the test average is 2s ((1+3)/2).
-        // So we should put SomeTestingClass (1s) into this bucket, AND then put the 'unknown' test 'YetAnotherTestingClass'
-        // into this bucket, using the mean duration = 2s, resulting in 3s.
-        Assert.assertEquals(TimeUnit.SECONDS.toNanos(3), forkContainers.get(1).getCurrentDuration().longValue());
-    }
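Taken together, the comments and assertions above describe a greedy bin-packing allocator: sort tests by decreasing known duration, price unknown tests at the mean, and drop each test into the currently least-loaded fork. A sketch under those assumptions (not the deleted BucketingAllocator; all names are invented). Given the durations in the final test below, and a stable sort for tie-breaking, it reproduces the asserted 3s/4s/4s/3s fork loads:

import java.util.*;

class GreedyBuckets {
    static List<List<String>> allocate(Map<String, Long> durations, List<String> tests, int forks) {
        // Unknown tests are priced at the mean of the known durations.
        long mean = (long) durations.values().stream().mapToLong(Long::longValue).average().orElse(1);
        List<String> sorted = new ArrayList<>(tests);
        sorted.sort(Comparator.comparingLong((String t) -> durations.getOrDefault(t, mean)).reversed());

        List<List<String>> buckets = new ArrayList<>();
        long[] load = new long[forks];
        for (int i = 0; i < forks; i++) buckets.add(new ArrayList<>());

        for (String test : sorted) {
            // Always place the next-largest test into the least-loaded fork.
            int lightest = 0;
            for (int i = 1; i < forks; i++) if (load[i] < load[lightest]) lightest = i;
            buckets.get(lightest).add(test);
            load[lightest] += durations.getOrDefault(test, mean);
        }
        return buckets;
    }
}
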
-
-    @Test
-    public void testBucketAllocationForSeveralTestsDistributedByClassName() {
-        Tests tests = new Tests();
-        tests.addDuration("SmallTestingClass", 1_000_000_000L);
-        tests.addDuration("LargeTestingClass", 3_000_000_000L);
-        tests.addDuration("MediumTestingClass", 2_000_000_000L);
-        // Gives a nice mean of 2s.
-        Assert.assertEquals(TimeUnit.SECONDS.toNanos(2), tests.getMeanDurationForTests());
-
-        BucketingAllocator bucketingAllocator = new BucketingAllocator(4, () -> tests);
-
-        List<String> testNames = Arrays.asList(
-                "EvenMoreTestingClass",
-                "YetAnotherTestingClass",
-                "AndYetAnotherTestingClass",
-                "OhYesAnotherTestingClass",
-                "MediumTestingClass",
-                "SmallTestingClass",
-                "LargeTestingClass");
-
-        Object task = new Object();
-        bucketingAllocator.addSource(() -> testNames, task);
-
-        // Does not preserve the order of known tests and unknown tests.
-        bucketingAllocator.generateTestPlan();
-
-        List<String> testsForFork0 = bucketingAllocator.getTestsForForkAndTestTask(0, task);
-        List<String> testsForFork1 = bucketingAllocator.getTestsForForkAndTestTask(1, task);
-        List<String> testsForFork2 = bucketingAllocator.getTestsForForkAndTestTask(2, task);
-        List<String> testsForFork3 = bucketingAllocator.getTestsForForkAndTestTask(3, task);
-
-        Assert.assertThat(testsForFork0.size(), is(1));
-        Assert.assertThat(testsForFork1.size(), is(2));
-        Assert.assertThat(testsForFork2.size(), is(2));
-        Assert.assertThat(testsForFork3.size(), is(2));
-
-        // This must be true as it is the largest value.
-        Assert.assertTrue(testsForFork0.contains("LargeTestingClass"));
-
-        List<String> allTests = Stream.of(testsForFork0, testsForFork1, testsForFork2, testsForFork3)
-                .flatMap(Collection::stream).collect(Collectors.toList());
-
-        Assert.assertThat(allTests, IsIterableContainingInAnyOrder.containsInAnyOrder(testNames.toArray()));
-
-        List<BucketingAllocator.TestsForForkContainer> forkContainers = bucketingAllocator.getForkContainers();
-        Assert.assertEquals(4, forkContainers.size());
-
-        long totalDuration = forkContainers.stream().mapToLong(c -> c.getCurrentDuration()).sum();
-        Assert.assertEquals(tests.getMeanDurationForTests() * testNames.size(), totalDuration);
-
-        Assert.assertEquals(TimeUnit.SECONDS.toNanos(3), forkContainers.get(0).getCurrentDuration().longValue());
-        Assert.assertEquals(TimeUnit.SECONDS.toNanos(4), forkContainers.get(1).getCurrentDuration().longValue());
-        Assert.assertEquals(TimeUnit.SECONDS.toNanos(4), forkContainers.get(2).getCurrentDuration().longValue());
-        Assert.assertEquals(TimeUnit.SECONDS.toNanos(3), forkContainers.get(3).getCurrentDuration().longValue());
-    }
-
-    @Test
-    public void durationToString() {
-        Assert.assertEquals("1 mins", BucketingAllocator.getDuration(60_000_000_000L));
-        Assert.assertEquals("4 secs", BucketingAllocator.getDuration(4_000_000_000L));
-        Assert.assertEquals("400 ms", BucketingAllocator.getDuration(400_000_000L));
-        Assert.assertEquals("400000 ns", BucketingAllocator.getDuration(400_000L));
-    }
-}
\ No newline at end of file

From 6487844ed5cc462e7fcd071a9770151cfe607a85 Mon Sep 17 00:00:00 2001
From: CaisManai <50658567+CaisManai@users.noreply.github.com>
Date: Mon, 25 Nov 2019 14:44:05 +0000
Subject: [PATCH 3/3] Fixing small typo

"Double-click the new contract file to open it" should read "Double-click the
new flow file to open it", as we are now working on flows.
---
 docs/source/quickstart-build.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/source/quickstart-build.rst b/docs/source/quickstart-build.rst
index c1c449817b..df0cb68907 100644
--- a/docs/source/quickstart-build.rst
+++ b/docs/source/quickstart-build.rst
@@ -241,7 +241,7 @@ Step Four: Creating a flow
     import net.corda.core.transactions.SignedTransaction
     import net.corda.core.transactions.TransactionBuilder
 
-5. Double-click the new contract file to open it.
+5. Double-click the new flow file to open it.
 
 6. Update the name of the ``Initiator`` class to ``CarIssueInitiator``.