TM-104 Switching the distributed testing plugin to the one released in artifactory (#5764)
* TM-104 switch to using the published plugin
* TM-104 switching to artifactory plugin
* TM-104 remove unused plugin
* TM-104 adding docker plugin
* TM-104 adding docker plugin take 2
* add dependencies-dev and set distributed build plugin to changing
This commit is contained in:
parent: d604820de9
commit: d33dbb2ea9
Changed files: build.gradle (19 lines changed)
@@ -1,8 +1,6 @@
-import net.corda.testing.DistributeTestsBy
-import net.corda.testing.DistributedTesting
-import net.corda.testing.ImageBuilding
-import net.corda.testing.ParallelTestGroup
-import net.corda.testing.PodLogLevel
+import com.r3.testing.DistributeTestsBy
+import com.r3.testing.ParallelTestGroup
+import com.r3.testing.PodLogLevel
 
 import static org.gradle.api.JavaVersion.VERSION_11
 import static org.gradle.api.JavaVersion.VERSION_1_8
@@ -152,6 +150,9 @@ buildscript {
         maven {
             url 'https://kotlin.bintray.com/kotlinx'
         }
+        maven {
+            url "https://ci-artifactory.corda.r3cev.com/artifactory/corda-dependencies-dev"
+        }
         maven {
             url "$artifactory_contextUrl/corda-releases"
         }
@@ -176,6 +177,8 @@ buildscript {
         // Capsule gradle plugin forked and maintained locally to support Gradle 5.x
         // See https://github.com/corda/gradle-capsule-plugin
         classpath "us.kirchmeier:gradle-capsule-plugin:1.0.4_r3"
+        classpath group: "com.r3.testing", name: "gradle-distributed-testing-plugin", version: "1.2-SNAPSHOT", changing: true
+        classpath "com.bmuschko:gradle-docker-plugin:5.0.0"
     }
 }
 
@@ -183,7 +186,6 @@ plugins {
     // Add the shadow plugin to the plugins classpath for the entire project.
     id 'com.github.johnrengelman.shadow' version '2.0.4' apply false
     id "com.gradle.build-scan" version "2.2.1"
-    id 'com.bmuschko.docker-remote-api'
 }
 
 ext {
@@ -194,6 +196,7 @@ apply plugin: 'com.github.ben-manes.versions'
 apply plugin: 'net.corda.plugins.publish-utils'
 apply plugin: 'maven-publish'
 apply plugin: 'com.jfrog.artifactory'
+apply plugin: "com.bmuschko.docker-remote-api"
 
 // We need the following three lines even though they're inside an allprojects {} block below because otherwise
 // IntelliJ gets confused when importing the project and ends up erasing and recreating the .idea directory, along
@@ -638,5 +641,5 @@ task allParallelSmokeTest(type: ParallelTestGroup) {
     memoryInGbPerFork 10
     distribute DistributeTestsBy.CLASS
 }
-apply plugin: ImageBuilding
-apply plugin: DistributedTesting
+apply plugin: 'com.r3.testing.distributed-testing'
+apply plugin: 'com.r3.testing.image-building'
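Net effect of the build.gradle changes above: the distributed-testing machinery is no longer compiled in buildSrc and applied as classes (ImageBuilding, DistributedTesting), but resolved from Artifactory as a published plugin and applied by id; the changing: true flag makes Gradle re-check the 1.2-SNAPSHOT instead of caching it indefinitely. A minimal consumer sketch, assuming only the repository and coordinates shown in this diff:

buildscript {
    repositories {
        maven { url "https://ci-artifactory.corda.r3cev.com/artifactory/corda-dependencies-dev" }
    }
    dependencies {
        classpath group: "com.r3.testing", name: "gradle-distributed-testing-plugin", version: "1.2-SNAPSHOT", changing: true
        classpath "com.bmuschko:gradle-docker-plugin:5.0.0"
    }
}
apply plugin: 'com.r3.testing.distributed-testing'
apply plugin: 'com.r3.testing.image-building'

The remainder of the commit deletes the now-redundant buildSrc sources, reproduced below.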
Deleted file: buildSrc/build.gradle
@@ -1,38 +0,0 @@
buildscript {
    Properties constants = new Properties()
    file("../constants.properties").withInputStream { constants.load(it) }

    ext {
        guava_version = constants.getProperty("guavaVersion")
        class_graph_version = constants.getProperty('classgraphVersion')
        assertj_version = '3.9.1'
        junit_version = '4.12'
    }
}

repositories {
    mavenLocal()
    mavenCentral()
    jcenter()
}

allprojects {
    tasks.withType(Test) {
        // Prevent the project from creating temporary files outside of the build directory.
        systemProperty 'java.io.tmpdir', buildDir.absolutePath
    }
}

dependencies {
    compile gradleApi()
    compile "io.fabric8:kubernetes-client:4.4.1"
    compile 'org.apache.commons:commons-compress:1.19'
    compile 'org.apache.commons:commons-lang3:3.9'
    compile 'commons-codec:commons-codec:1.13'
    compile "io.github.classgraph:classgraph:$class_graph_version"
    compile "com.bmuschko:gradle-docker-plugin:5.0.0"
    compile 'org.apache.commons:commons-csv:1.1'
    compile group: 'org.jetbrains', name: 'annotations', version: '13.0'
    testCompile "junit:junit:$junit_version"
    testCompile group: 'org.hamcrest', name: 'hamcrest-all', version: '1.3'
}
@ -1,3 +0,0 @@
|
|||||||
rootProject.name = 'buildSrc'
|
|
||||||
|
|
||||||
apply from: '../buildCacheSettings.gradle'
|
|
@ -1,147 +0,0 @@
|
|||||||
package net.corda.testing;
|
|
||||||
|
|
||||||
import okhttp3.*;
|
|
||||||
import org.apache.commons.compress.utils.IOUtils;
|
|
||||||
import org.jetbrains.annotations.NotNull;
|
|
||||||
import org.jetbrains.annotations.Nullable;
|
|
||||||
import org.slf4j.Logger;
|
|
||||||
import org.slf4j.LoggerFactory;
|
|
||||||
|
|
||||||
import java.io.*;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Used by TestArtifacts
|
|
||||||
*/
|
|
||||||
public class Artifactory {
|
|
||||||
|
|
||||||
//<editor-fold desc="Statics">
|
|
||||||
private static final Logger LOG = LoggerFactory.getLogger(Artifactory.class);
|
|
||||||
|
|
||||||
private static String authorization() {
|
|
||||||
return Credentials.basic(Properties.getUsername(), Properties.getPassword());
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Construct the URL in a style that Artifactory prefers.
|
|
||||||
*
|
|
||||||
* @param baseUrl e.g. https://software.r3.com/artifactory/corda-releases/net/corda/corda/
|
|
||||||
* @param theTag e.g. 4.3-RC0
|
|
||||||
* @param artifact e.g. corda
|
|
||||||
* @param extension e.g. jar
|
|
||||||
* @return full URL to artifact.
|
|
||||||
*/
|
|
||||||
private static String getFullUrl(@NotNull final String baseUrl,
|
|
||||||
@NotNull final String theTag,
|
|
||||||
@NotNull final String artifact,
|
|
||||||
@NotNull final String extension) {
|
|
||||||
return baseUrl + "/" + theTag + "/" + getFileName(artifact, extension, theTag);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param artifact e.g. corda
|
|
||||||
* @param extension e.g. jar
|
|
||||||
* @param theTag e.g. 4.3
|
|
||||||
* @return e.g. corda-4.3.jar
|
|
||||||
*/
|
|
||||||
static String getFileName(@NotNull final String artifact,
|
|
||||||
@NotNull final String extension,
|
|
||||||
@Nullable final String theTag) {
|
|
||||||
StringBuilder sb = new StringBuilder().append(artifact);
|
|
||||||
if (theTag != null) {
|
|
||||||
sb.append("-").append(theTag);
|
|
||||||
}
|
|
||||||
sb.append(".").append(extension);
|
|
||||||
return sb.toString();
|
|
||||||
}
|
|
||||||
//</editor-fold>
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get the unit tests, synchronous get.
|
|
||||||
* See https://www.jfrog.com/confluence/display/RTF/Artifactory+REST+API#ArtifactoryRESTAPI-RetrieveLatestArtifact
|
|
||||||
*
|
|
||||||
* @return true if successful, false otherwise.
|
|
||||||
*/
|
|
||||||
boolean get(@NotNull final String baseUrl,
|
|
||||||
@NotNull final String theTag,
|
|
||||||
@NotNull final String artifact,
|
|
||||||
@NotNull final String extension,
|
|
||||||
@NotNull final OutputStream outputStream) {
|
|
||||||
final String url = getFullUrl(baseUrl, theTag, artifact, extension);
|
|
||||||
final Request request = new Request.Builder()
|
|
||||||
.addHeader("Authorization", authorization())
|
|
||||||
.url(url)
|
|
||||||
.build();
|
|
||||||
|
|
||||||
final OkHttpClient client = new OkHttpClient();
|
|
||||||
|
|
||||||
try (Response response = client.newCall(request).execute()) {
|
|
||||||
handleResponse(response);
|
|
||||||
if (response.body() != null) {
|
|
||||||
outputStream.write(response.body().bytes());
|
|
||||||
} else {
|
|
||||||
LOG.warn("Response body was empty");
|
|
||||||
}
|
|
||||||
} catch (IOException e) {
|
|
||||||
LOG.warn("Unable to execute GET via REST");
|
|
||||||
LOG.debug("Exception", e);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
LOG.warn("Ok. REST GET successful");
|
|
||||||
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Post an artifact, synchronous PUT
|
|
||||||
* See https://www.jfrog.com/confluence/display/RTF/Artifactory+REST+API#ArtifactoryRESTAPI-DeployArtifact
|
|
||||||
*
|
|
||||||
* @return true if successful
|
|
||||||
*/
|
|
||||||
boolean put(@NotNull final String baseUrl,
|
|
||||||
@NotNull final String theTag,
|
|
||||||
@NotNull final String artifact,
|
|
||||||
@NotNull final String extension,
|
|
||||||
@NotNull final InputStream inputStream) {
|
|
||||||
final MediaType contentType = MediaType.parse("application/zip, application/octet-stream");
|
|
||||||
final String url = getFullUrl(baseUrl, theTag, artifact, extension);
|
|
||||||
|
|
||||||
final OkHttpClient client = new OkHttpClient();
|
|
||||||
|
|
||||||
byte[] bytes;
|
|
||||||
|
|
||||||
try {
|
|
||||||
bytes = IOUtils.toByteArray(inputStream);
|
|
||||||
} catch (IOException e) {
|
|
||||||
LOG.warn("Unable to execute PUT tests via REST: ", e);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
final Request request = new Request.Builder()
|
|
||||||
.addHeader("Authorization", authorization())
|
|
||||||
.url(url)
|
|
||||||
.put(RequestBody.create(contentType, bytes))
|
|
||||||
.build();
|
|
||||||
|
|
||||||
try (Response response = client.newCall(request).execute()) {
|
|
||||||
handleResponse(response);
|
|
||||||
} catch (IOException e) {
|
|
||||||
LOG.warn("Unable to execute PUT via REST: ", e);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
private void handleResponse(@NotNull final Response response) throws IOException {
|
|
||||||
if (response.isSuccessful()) return;
|
|
||||||
|
|
||||||
LOG.warn("Bad response from server: {}", response.toString());
|
|
||||||
LOG.warn(response.toString());
|
|
||||||
if (response.code() == 401) {
|
|
||||||
throw new IOException("Not authorized - incorrect credentials?");
|
|
||||||
}
|
|
||||||
|
|
||||||
throw new IOException(response.message());
|
|
||||||
}
|
|
||||||
}
|
|
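The class above is a thin synchronous wrapper over OkHttp. A hypothetical caller, using the example values from its own Javadoc; note that Artifactory's collaborators (the Properties class supplying credentials, TestDurationArtifacts) are not shown in this diff, and the names in this sketch are illustrative only:

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;

class ArtifactoryUsageSketch {
    static boolean fetchCorda() throws IOException {
        Artifactory artifactory = new Artifactory();
        try (OutputStream out = new FileOutputStream("corda-4.3-RC0.jar")) {
            // Resolves to <baseUrl>/4.3-RC0/corda-4.3-RC0.jar; returns false on any IO/HTTP failure.
            return artifactory.get(
                    "https://software.r3.com/artifactory/corda-releases/net/corda/corda",
                    "4.3-RC0", "corda", "jar", out);
        }
    }
}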
@ -1,209 +0,0 @@
|
|||||||
package net.corda.testing;
|
|
||||||
|
|
||||||
//Why Java?! because sometimes types are useful.
|
|
||||||
|
|
||||||
import groovy.lang.Tuple2;
|
|
||||||
import org.gradle.api.tasks.TaskAction;
|
|
||||||
import org.jetbrains.annotations.NotNull;
|
|
||||||
import org.slf4j.Logger;
|
|
||||||
import org.slf4j.LoggerFactory;
|
|
||||||
|
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.Collection;
|
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.Comparator;
|
|
||||||
import java.util.HashMap;
|
|
||||||
import java.util.List;
|
|
||||||
import java.util.Map;
|
|
||||||
import java.util.concurrent.TimeUnit;
|
|
||||||
import java.util.function.Supplier;
|
|
||||||
import java.util.stream.Collectors;
|
|
||||||
import java.util.stream.IntStream;
|
|
||||||
|
|
||||||
import static net.corda.testing.ListTests.DISTRIBUTION_PROPERTY;
|
|
||||||
|
|
||||||
public class BucketingAllocator {
|
|
||||||
private static final Logger LOG = LoggerFactory.getLogger(BucketingAllocator.class);
|
|
||||||
private final List<TestsForForkContainer> forkContainers;
|
|
||||||
private final Supplier<Tests> timedTestsProvider;
|
|
||||||
private List<Tuple2<TestLister, Object>> sources = new ArrayList<>();
|
|
||||||
|
|
||||||
private DistributeTestsBy distribution = System.getProperty(DISTRIBUTION_PROPERTY) != null && !System.getProperty(DISTRIBUTION_PROPERTY).isEmpty() ?
|
|
||||||
DistributeTestsBy.valueOf(System.getProperty(DISTRIBUTION_PROPERTY)) : DistributeTestsBy.METHOD;
|
|
||||||
|
|
||||||
|
|
||||||
public BucketingAllocator(Integer forkCount, Supplier<Tests> timedTestsProvider) {
|
|
||||||
this.forkContainers = IntStream.range(0, forkCount).mapToObj(TestsForForkContainer::new).collect(Collectors.toList());
|
|
||||||
this.timedTestsProvider = timedTestsProvider;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void addSource(TestLister source, Object testTask) {
|
|
||||||
sources.add(new Tuple2<>(source, testTask));
|
|
||||||
}
|
|
||||||
|
|
||||||
public List<String> getTestsForForkAndTestTask(Integer fork, Object testTask) {
|
|
||||||
return forkContainers.get(fork).getTestsForTask(testTask);
|
|
||||||
}
|
|
||||||
|
|
||||||
@TaskAction
|
|
||||||
public void generateTestPlan() {
|
|
||||||
Tests allTestsFromFile = timedTestsProvider.get();
|
|
||||||
List<Tuple2<String, Object>> allDiscoveredTests = getTestsOnClasspathOfTestingTasks();
|
|
||||||
List<TestBucket> matchedTests = matchClasspathTestsToFile(allTestsFromFile, allDiscoveredTests);
|
|
||||||
|
|
||||||
//use greedy algo - for each testbucket find the currently smallest container and add to it
|
|
||||||
allocateTestsToForks(matchedTests);
|
|
||||||
forkContainers.forEach(TestsForForkContainer::freeze);
|
|
||||||
|
|
||||||
printSummary();
|
|
||||||
}
|
|
||||||
|
|
||||||
static String getDuration(long nanos) {
|
|
||||||
long t = TimeUnit.NANOSECONDS.toMinutes(nanos);
|
|
||||||
if (t > 0) {
|
|
||||||
return t + " mins";
|
|
||||||
}
|
|
||||||
t = TimeUnit.NANOSECONDS.toSeconds(nanos);
|
|
||||||
if (t > 0) {
|
|
||||||
return t + " secs";
|
|
||||||
}
|
|
||||||
t = TimeUnit.NANOSECONDS.toMillis(nanos);
|
|
||||||
if (t > 0) {
|
|
||||||
return t + " ms";
|
|
||||||
}
|
|
||||||
return nanos + " ns";
|
|
||||||
}
|
|
||||||
|
|
||||||
private void printSummary() {
|
|
||||||
forkContainers.forEach(container -> {
|
|
||||||
System.out.println("####### TEST PLAN SUMMARY ( " + container.forkIdx + " ) #######");
|
|
||||||
System.out.println("Duration: " + getDuration(container.getCurrentDuration()));
|
|
||||||
System.out.println("Number of tests: " + container.testsForFork.stream().mapToInt(b -> b.foundTests.size()).sum());
|
|
||||||
System.out.println("Tests to Run: ");
|
|
||||||
container.testsForFork.forEach(tb -> {
|
|
||||||
System.out.println(tb.testName);
|
|
||||||
tb.foundTests.forEach(ft -> System.out.println("\t" + ft.getFirst() + ", " + getDuration(ft.getSecond())));
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
private void allocateTestsToForks(@NotNull List<TestBucket> matchedTests) {
|
|
||||||
matchedTests.forEach(matchedTestBucket -> {
|
|
||||||
TestsForForkContainer smallestContainer = Collections.min(forkContainers, Comparator.comparing(TestsForForkContainer::getCurrentDuration));
|
|
||||||
smallestContainer.addBucket(matchedTestBucket);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
List<TestsForForkContainer> getForkContainers() {
|
|
||||||
return forkContainers;
|
|
||||||
}
|
|
||||||
|
|
||||||
private List<TestBucket> matchClasspathTestsToFile(@NotNull final Tests tests,
|
|
||||||
@NotNull final List<Tuple2<String, Object>> allDiscoveredTests) {
|
|
||||||
// Note that this does not preserve the order of tests with known and unknown durations, as we
|
|
||||||
// always return a duration from 'tests.startsWith'.
|
|
||||||
return allDiscoveredTests.stream().map(tuple -> {
|
|
||||||
final String testName = tuple.getFirst();
|
|
||||||
final Object task = tuple.getSecond();
|
|
||||||
|
|
||||||
// If the gradle task is distributing by class rather than method, then 'testName' will be the className
|
|
||||||
// and not className.testName
|
|
||||||
// No matter which it is, we return the mean test duration as the duration value if not found.
|
|
||||||
final List<Tuple2<String, Long>> matchingTests;
|
|
||||||
switch (distribution) {
|
|
||||||
case METHOD:
|
|
||||||
matchingTests = tests.equals(testName);
|
|
||||||
break;
|
|
||||||
case CLASS:
|
|
||||||
matchingTests = tests.startsWith(testName);
|
|
||||||
break;
|
|
||||||
default:
|
|
||||||
throw new IllegalArgumentException("Unknown distribution type: " + distribution);
|
|
||||||
}
|
|
||||||
|
|
||||||
return new TestBucket(task, testName, matchingTests);
|
|
||||||
}).sorted(Comparator.comparing(TestBucket::getDuration).reversed()).collect(Collectors.toList());
|
|
||||||
}
|
|
||||||
|
|
||||||
private List<Tuple2<String, Object>> getTestsOnClasspathOfTestingTasks() {
|
|
||||||
return sources.stream().map(source -> {
|
|
||||||
TestLister lister = source.getFirst();
|
|
||||||
Object testTask = source.getSecond();
|
|
||||||
return lister.getAllTestsDiscovered().stream().map(test -> new Tuple2<>(test, testTask)).collect(Collectors.toList());
|
|
||||||
}).flatMap(Collection::stream).sorted(Comparator.comparing(Tuple2::getFirst)).collect(Collectors.toList());
|
|
||||||
}
|
|
||||||
|
|
||||||
public static class TestBucket {
|
|
||||||
final Object testTask;
|
|
||||||
final String testName;
|
|
||||||
final List<Tuple2<String, Long>> foundTests;
|
|
||||||
final long durationNanos;
|
|
||||||
|
|
||||||
public TestBucket(@NotNull final Object testTask,
|
|
||||||
@NotNull final String testName,
|
|
||||||
@NotNull final List<Tuple2<String, Long>> foundTests) {
|
|
||||||
this.testTask = testTask;
|
|
||||||
this.testName = testName;
|
|
||||||
this.foundTests = foundTests;
|
|
||||||
this.durationNanos = foundTests.stream().mapToLong(tp -> Math.max(tp.getSecond(), 1)).sum();
|
|
||||||
}
|
|
||||||
|
|
||||||
public long getDuration() {
|
|
||||||
return durationNanos;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public String toString() {
|
|
||||||
return "TestBucket{" +
|
|
||||||
"testTask=" + testTask +
|
|
||||||
", nameWithAsterix='" + testName + '\'' +
|
|
||||||
", foundTests=" + foundTests +
|
|
||||||
", durationNanos=" + durationNanos +
|
|
||||||
'}';
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public static class TestsForForkContainer {
|
|
||||||
private final Integer forkIdx;
|
|
||||||
private final List<TestBucket> testsForFork = Collections.synchronizedList(new ArrayList<>());
|
|
||||||
private final Map<Object, List<TestBucket>> frozenTests = new HashMap<>();
|
|
||||||
private long runningDuration = 0L;
|
|
||||||
|
|
||||||
public TestsForForkContainer(Integer forkIdx) {
|
|
||||||
this.forkIdx = forkIdx;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void addBucket(TestBucket tb) {
|
|
||||||
this.testsForFork.add(tb);
|
|
||||||
this.runningDuration = this.runningDuration + tb.durationNanos;
|
|
||||||
}
|
|
||||||
|
|
||||||
public Long getCurrentDuration() {
|
|
||||||
return runningDuration;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void freeze() {
|
|
||||||
testsForFork.forEach(tb -> {
|
|
||||||
frozenTests.computeIfAbsent(tb.testTask, i -> new ArrayList<>()).add(tb);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
public List<String> getTestsForTask(Object task) {
|
|
||||||
return frozenTests.getOrDefault(task, Collections.emptyList()).stream().map(it -> it.testName).collect(Collectors.toList());
|
|
||||||
}
|
|
||||||
|
|
||||||
public List<TestBucket> getBucketsForFork() {
|
|
||||||
return new ArrayList<>(testsForFork);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public String toString() {
|
|
||||||
return "TestsForForkContainer{" +
|
|
||||||
"runningDuration=" + runningDuration +
|
|
||||||
", forkIdx=" + forkIdx +
|
|
||||||
", testsForFork=" + testsForFork +
|
|
||||||
", frozenTests=" + frozenTests +
|
|
||||||
'}';
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
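The allocation strategy above is the classic longest-processing-time greedy heuristic: buckets are sorted by measured duration, descending, and each one is placed on whichever fork currently has the least accumulated time. A stripped-down sketch of just that loop (durations in nanoseconds, as in the class above; all names here are illustrative, not part of the plugin):

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

class GreedyAllocationSketch {
    static long[] allocate(List<Long> bucketDurations, int forkCount) {
        long[] forkTotals = new long[forkCount];
        List<Long> sorted = new ArrayList<>(bucketDurations);
        sorted.sort(Comparator.reverseOrder()); // biggest buckets first
        for (long duration : sorted) {
            int smallest = 0;
            for (int i = 1; i < forkCount; i++) {   // find the currently lightest fork
                if (forkTotals[i] < forkTotals[smallest]) smallest = i;
            }
            forkTotals[smallest] += duration;       // and add the bucket to it
        }
        return forkTotals; // expected wall-clock time per fork
    }
}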
@ -1,32 +0,0 @@
|
|||||||
package net.corda.testing;
|
|
||||||
|
|
||||||
import org.gradle.api.DefaultTask;
|
|
||||||
import org.gradle.api.tasks.TaskAction;
|
|
||||||
import org.gradle.api.tasks.testing.Test;
|
|
||||||
|
|
||||||
import javax.inject.Inject;
|
|
||||||
import java.util.List;
|
|
||||||
import java.util.stream.Collectors;
|
|
||||||
|
|
||||||
public class BucketingAllocatorTask extends DefaultTask {
|
|
||||||
private final BucketingAllocator allocator;
|
|
||||||
|
|
||||||
@Inject
|
|
||||||
public BucketingAllocatorTask(Integer forkCount) {
|
|
||||||
this.allocator = new BucketingAllocator(forkCount, TestDurationArtifacts.getTestsSupplier());
|
|
||||||
}
|
|
||||||
|
|
||||||
public void addSource(TestLister source, Test testTask) {
|
|
||||||
allocator.addSource(source, testTask);
|
|
||||||
this.dependsOn(source);
|
|
||||||
}
|
|
||||||
|
|
||||||
public List<String> getTestIncludesForForkAndTestTask(Integer fork, Test testTask) {
|
|
||||||
return allocator.getTestsForForkAndTestTask(fork, testTask).stream().map(t -> t + "*").collect(Collectors.toList());
|
|
||||||
}
|
|
||||||
|
|
||||||
@TaskAction
|
|
||||||
public void allocate() {
|
|
||||||
allocator.generateTestPlan();
|
|
||||||
}
|
|
||||||
}
|
|
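Note the t -> t + "*" mapping: the allocator returns plain test identifiers, and the trailing wildcard turns each one into a Gradle test-filter pattern, so parameterised or otherwise suffixed variants of a method still match. In a test task that corresponds to something like the following (the test name is illustrative):

test {
    filter {
        // an allocator entry "net.corda.SomeTest.someMethod" becomes:
        includeTestsMatching "net.corda.SomeTest.someMethod*"
        failOnNoMatchingTests false
    }
}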
@ -1,5 +0,0 @@
|
|||||||
package net.corda.testing;
|
|
||||||
|
|
||||||
public enum DistributeTestsBy {
|
|
||||||
CLASS, METHOD
|
|
||||||
}
|
|
@ -1,310 +0,0 @@
|
|||||||
package net.corda.testing
|
|
||||||
|
|
||||||
import com.bmuschko.gradle.docker.tasks.image.DockerBuildImage
|
|
||||||
import com.bmuschko.gradle.docker.tasks.image.DockerPushImage
|
|
||||||
import org.gradle.api.GradleException
|
|
||||||
import org.gradle.api.Plugin
|
|
||||||
import org.gradle.api.Project
|
|
||||||
import org.gradle.api.Task
|
|
||||||
import org.gradle.api.tasks.testing.Test
|
|
||||||
import org.gradle.api.tasks.testing.TestResult
|
|
||||||
import org.gradle.internal.impldep.junit.framework.TestFailure
|
|
||||||
|
|
||||||
import java.util.stream.Collectors
|
|
||||||
|
|
||||||
/**
|
|
||||||
This plugin is responsible for wiring together the various components of test task modification
|
|
||||||
*/
|
|
||||||
class DistributedTesting implements Plugin<Project> {
|
|
||||||
|
|
||||||
public static final String GRADLE_GROUP = "Distributed Testing";
|
|
||||||
|
|
||||||
static def getPropertyAsInt(Project proj, String property, Integer defaultValue) {
|
|
||||||
return proj.hasProperty(property) ? Integer.parseInt(proj.property(property).toString()) : defaultValue
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
void apply(Project project) {
|
|
||||||
if (System.getProperty("kubenetize") != null) {
|
|
||||||
Properties.setRootProjectType(project.rootProject.name)
|
|
||||||
|
|
||||||
Integer forks = getPropertyAsInt(project, "dockerForks", 1)
|
|
||||||
|
|
||||||
ensureImagePluginIsApplied(project)
|
|
||||||
ImageBuilding imagePlugin = project.plugins.getPlugin(ImageBuilding)
|
|
||||||
DockerPushImage imagePushTask = imagePlugin.pushTask
|
|
||||||
DockerBuildImage imageBuildTask = imagePlugin.buildTask
|
|
||||||
String tagToUseForRunningTests = System.getProperty(ImageBuilding.PROVIDE_TAG_FOR_RUNNING_PROPERTY)
|
|
||||||
String tagToUseForBuilding = System.getProperty(ImageBuilding.PROVIDE_TAG_FOR_RUNNING_PROPERTY)
|
|
||||||
BucketingAllocatorTask globalAllocator = project.tasks.create("bucketingAllocator", BucketingAllocatorTask, forks)
|
|
||||||
globalAllocator.group = GRADLE_GROUP
|
|
||||||
globalAllocator.description = "Allocates tests to buckets"
|
|
||||||
|
|
||||||
|
|
||||||
Set<String> requestedTaskNames = project.gradle.startParameter.taskNames.toSet()
|
|
||||||
def requestedTasks = requestedTaskNames.collect { project.tasks.findByPath(it) }
|
|
||||||
|
|
||||||
//in each subproject
|
|
||||||
//1. add the task to determine all tests within the module and register this as a source to the global allocator
|
|
||||||
//2. modify the underlying testing task to use the output of the global allocator to include a subset of tests for each fork
|
|
||||||
//3. KubesTest will invoke these test tasks in a parallel fashion on a remote k8s cluster
|
|
||||||
//4. after each completed test write its name to a file to keep track of what finished for restart purposes
|
|
||||||
project.subprojects { Project subProject ->
|
|
||||||
subProject.tasks.withType(Test) { Test task ->
|
|
||||||
project.logger.info("Evaluating ${task.getPath()}")
|
|
||||||
if (task in requestedTasks && !task.hasProperty("ignoreForDistribution")) {
|
|
||||||
project.logger.info "Modifying ${task.getPath()}"
|
|
||||||
Task testListerTask = createTestListingTasks(task, subProject)
|
|
||||||
globalAllocator.addSource(testListerTask, task)
|
|
||||||
Test modifiedTestTask = modifyTestTaskForParallelExecution(subProject, task, globalAllocator)
|
|
||||||
} else {
|
|
||||||
project.logger.info "Skipping modification of ${task.getPath()} as it's not scheduled for execution"
|
|
||||||
}
|
|
||||||
if (!task.hasProperty("ignoreForDistribution")) {
|
|
||||||
//this is what enables execution of a single test suite - for example node:parallelTest would execute all unit tests in node, node:parallelIntegrationTest would do the same for integration tests
|
|
||||||
KubesTest parallelTestTask = generateParallelTestingTask(subProject, task, imagePushTask, tagToUseForRunningTests)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
//now we are going to create "super" groupings of the Test tasks, so that it is possible to invoke all submodule tests with a single command
|
|
||||||
//group all test Tasks by their underlying target task (test/integrationTest/smokeTest ... etc)
|
|
||||||
Map<String, List<Test>> allTestTasksGroupedByType = project.subprojects.collect { prj -> prj.getAllTasks(false).values() }
|
|
||||||
.flatten()
|
|
||||||
.findAll { task -> task instanceof Test }
|
|
||||||
.groupBy { Test task -> task.name }
|
|
||||||
|
|
||||||
//first step is to create a single task which will invoke all the submodule tasks for each grouping
|
|
||||||
//ie allParallelTest will invoke [node:test, core:test, client:rpc:test ... etc]
|
|
||||||
//ie allIntegrationTest will invoke [node:integrationTest, core:integrationTest, client:rpc:integrationTest ... etc]
|
|
||||||
//ie allUnitAndIntegrationTest will invoke [node:integrationTest, node:test, core:integrationTest, core:test, client:rpc:test , client:rpc:integrationTest ... etc]
|
|
||||||
Set<ParallelTestGroup> userGroups = new HashSet<>(project.tasks.withType(ParallelTestGroup))
|
|
||||||
|
|
||||||
userGroups.forEach { testGrouping ->
|
|
||||||
|
|
||||||
//for each "group" (ie: test, integrationTest) within the grouping find all the Test tasks which have the same name.
|
|
||||||
List<Test> testTasksToRunInGroup = ((ParallelTestGroup) testGrouping).getGroups().collect {
|
|
||||||
allTestTasksGroupedByType.get(it)
|
|
||||||
}.flatten()
|
|
||||||
|
|
||||||
//join up these test tasks into a single set of tasks to invoke (node:test, node:integrationTest...)
|
|
||||||
String superListOfTasks = testTasksToRunInGroup.collect { it.path }.join(" ")
|
|
||||||
|
|
||||||
//generate a preAllocate / deAllocate task which allows you to "pre-book" a node during the image building phase
|
|
||||||
//this prevents time lost to cloud provider node spin up time (assuming image build time > provider spin up time)
|
|
||||||
def (Task preAllocateTask, Task deAllocateTask) = generatePreAllocateAndDeAllocateTasksForGrouping(project, testGrouping)
|
|
||||||
|
|
||||||
//modify the image building task to depend on the preAllocate task (if specified on the command line) - this prevents gradle running out of order
|
|
||||||
if (preAllocateTask.name in requestedTaskNames) {
|
|
||||||
imageBuildTask.dependsOn preAllocateTask
|
|
||||||
imagePushTask.finalizedBy(deAllocateTask)
|
|
||||||
}
|
|
||||||
|
|
||||||
def userDefinedParallelTask = project.rootProject.tasks.create("userDefined" + testGrouping.getName().capitalize(), KubesTest) {
|
|
||||||
group = GRADLE_GROUP
|
|
||||||
|
|
||||||
if (!tagToUseForRunningTests) {
|
|
||||||
dependsOn imagePushTask
|
|
||||||
}
|
|
||||||
|
|
||||||
if (deAllocateTask.name in requestedTaskNames) {
|
|
||||||
dependsOn deAllocateTask
|
|
||||||
}
|
|
||||||
numberOfPods = testGrouping.getShardCount()
|
|
||||||
printOutput = testGrouping.getPrintToStdOut()
|
|
||||||
fullTaskToExecutePath = superListOfTasks
|
|
||||||
taskToExecuteName = testGrouping.getGroups().join("And")
|
|
||||||
memoryGbPerFork = testGrouping.getGbOfMemory()
|
|
||||||
numberOfCoresPerFork = testGrouping.getCoresToUse()
|
|
||||||
distribution = testGrouping.getDistribution()
|
|
||||||
podLogLevel = testGrouping.getLogLevel()
|
|
||||||
taints = testGrouping.getNodeTaints()
|
|
||||||
sidecarImage = testGrouping.sidecarImage
|
|
||||||
additionalArgs = testGrouping.additionalArgs
|
|
||||||
doFirst {
|
|
||||||
dockerTag = tagToUseForRunningTests ? (ImageBuilding.registryName + ":" + tagToUseForRunningTests) : (imagePushTask.imageName.get() + ":" + imagePushTask.tag.get())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
def reportOnAllTask = project.rootProject.tasks.create("userDefinedReports${testGrouping.getName().capitalize()}", KubesReporting) {
|
|
||||||
group = GRADLE_GROUP
|
|
||||||
dependsOn userDefinedParallelTask
|
|
||||||
destinationDir new File(project.rootProject.getBuildDir(), "userDefinedReports${testGrouping.getName().capitalize()}")
|
|
||||||
doFirst {
|
|
||||||
destinationDir.deleteDir()
|
|
||||||
shouldPrintOutput = !testGrouping.getPrintToStdOut()
|
|
||||||
podResults = userDefinedParallelTask.containerResults
|
|
||||||
reportOn(userDefinedParallelTask.testOutput)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Task to zip up test results, and upload them to somewhere (Artifactory).
|
|
||||||
def zipTask = TestDurationArtifacts.createZipTask(project.rootProject, testGrouping.name, userDefinedParallelTask)
|
|
||||||
|
|
||||||
userDefinedParallelTask.finalizedBy(reportOnAllTask)
|
|
||||||
zipTask.dependsOn(userDefinedParallelTask)
|
|
||||||
testGrouping.dependsOn(zipTask)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Added only so that we can manually run zipTask on the command line as a test.
|
|
||||||
TestDurationArtifacts.createZipTask(project.rootProject, "zipTask", null)
|
|
||||||
.setDescription("Zip task that can be run locally for testing");
|
|
||||||
}
|
|
||||||
|
|
||||||
private List<Task> generatePreAllocateAndDeAllocateTasksForGrouping(Project project, ParallelTestGroup testGrouping) {
|
|
||||||
PodAllocator allocator = new PodAllocator(project.getLogger())
|
|
||||||
Task preAllocateTask = project.rootProject.tasks.create("preAllocateFor" + testGrouping.getName().capitalize()) {
|
|
||||||
group = GRADLE_GROUP
|
|
||||||
doFirst {
|
|
||||||
String dockerTag = System.getProperty(ImageBuilding.PROVIDE_TAG_FOR_BUILDING_PROPERTY)
|
|
||||||
if (dockerTag == null) {
|
|
||||||
throw new GradleException("pre allocation cannot be used without a stable docker tag - please provide one using -D" + ImageBuilding.PROVIDE_TAG_FOR_BUILDING_PROPERTY)
|
|
||||||
}
|
|
||||||
int seed = (dockerTag.hashCode() + testGrouping.getName().hashCode())
|
|
||||||
String podPrefix = new BigInteger(64, new Random(seed)).toString(36)
|
|
||||||
//here we will pre-request the correct number of pods for this testGroup
|
|
||||||
int numberOfPodsToRequest = testGrouping.getShardCount()
|
|
||||||
int coresPerPod = testGrouping.getCoresToUse()
|
|
||||||
int memoryGBPerPod = testGrouping.getGbOfMemory()
|
|
||||||
allocator.allocatePods(numberOfPodsToRequest, coresPerPod, memoryGBPerPod, podPrefix, testGrouping.getNodeTaints())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Task deAllocateTask = project.rootProject.tasks.create("deAllocateFor" + testGrouping.getName().capitalize()) {
|
|
||||||
group = GRADLE_GROUP
|
|
||||||
doFirst {
|
|
||||||
String dockerTag = System.getProperty(ImageBuilding.PROVIDE_TAG_FOR_RUNNING_PROPERTY) ?:
|
|
||||||
System.getProperty(ImageBuilding.PROVIDE_TAG_FOR_BUILDING_PROPERTY)
|
|
||||||
if (dockerTag == null) {
|
|
||||||
throw new GradleException("pre allocation cannot be used without a stable docker tag - please provide one using -D" + ImageBuilding.PROVIDE_TAG_FOR_RUNNING_PROPERTY)
|
|
||||||
}
|
|
||||||
int seed = (dockerTag.hashCode() + testGrouping.getName().hashCode())
|
|
||||||
String podPrefix = new BigInteger(64, new Random(seed)).toString(36);
|
|
||||||
allocator.tearDownPods(podPrefix)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return [preAllocateTask, deAllocateTask]
|
|
||||||
}
|
|
||||||
|
|
||||||
private KubesTest generateParallelTestingTask(Project projectContainingTask, Test task, DockerPushImage imageBuildingTask, String providedTag) {
|
|
||||||
def taskName = task.getName()
|
|
||||||
def capitalizedTaskName = task.getName().capitalize()
|
|
||||||
|
|
||||||
KubesTest createdParallelTestTask = projectContainingTask.tasks.create("parallel" + capitalizedTaskName, KubesTest) {
|
|
||||||
group = GRADLE_GROUP + " Parallel Test Tasks"
|
|
||||||
if (!providedTag) {
|
|
||||||
dependsOn imageBuildingTask
|
|
||||||
}
|
|
||||||
printOutput = true
|
|
||||||
fullTaskToExecutePath = task.getPath()
|
|
||||||
taskToExecuteName = taskName
|
|
||||||
doFirst {
|
|
||||||
dockerTag = providedTag ? ImageBuilding.registryName + ":" + providedTag : (imageBuildingTask.imageName.get() + ":" + imageBuildingTask.tag.get())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
projectContainingTask.logger.info "Created task: ${createdParallelTestTask.getPath()} to enable testing on kubenetes for task: ${task.getPath()}"
|
|
||||||
return createdParallelTestTask as KubesTest
|
|
||||||
}
|
|
||||||
|
|
||||||
private Test modifyTestTaskForParallelExecution(Project subProject, Test task, BucketingAllocatorTask globalAllocator) {
|
|
||||||
subProject.logger.info("modifying task: ${task.getPath()} to depend on task ${globalAllocator.getPath()}")
|
|
||||||
def reportsDir = new File(new File(KubesTest.TEST_RUN_DIR, "test-reports"), subProject.name + "-" + task.name)
|
|
||||||
reportsDir.mkdirs()
|
|
||||||
File executedTestsFile = new File(KubesTest.TEST_RUN_DIR + "/executedTests.txt")
|
|
||||||
task.configure {
|
|
||||||
dependsOn globalAllocator
|
|
||||||
binResultsDir new File(reportsDir, "binary")
|
|
||||||
reports.junitXml.destination new File(reportsDir, "xml")
|
|
||||||
maxHeapSize = "10g"
|
|
||||||
|
|
||||||
doFirst {
|
|
||||||
executedTestsFile.createNewFile()
|
|
||||||
filter {
|
|
||||||
List<String> executedTests = executedTestsFile.readLines()
|
|
||||||
//adding wildcard to each test so they match the ones in the includes list
|
|
||||||
executedTests.replaceAll({ test -> test + "*" })
|
|
||||||
def fork = getPropertyAsInt(subProject, "dockerFork", 0)
|
|
||||||
subProject.logger.info("requesting tests to include in testing task ${task.getPath()} (idx: ${fork})")
|
|
||||||
List<String> includes = globalAllocator.getTestIncludesForForkAndTestTask(
|
|
||||||
fork,
|
|
||||||
task)
|
|
||||||
subProject.logger.info "got ${includes.size()} tests to include into testing task ${task.getPath()}"
|
|
||||||
subProject.logger.info "INCLUDE: ${includes.toString()} "
|
|
||||||
subProject.logger.info "got ${executedTests.size()} tests to exclude from testing task ${task.getPath()}"
|
|
||||||
subProject.logger.debug "EXCLUDE: ${executedTests.toString()} "
|
|
||||||
if (includes.size() == 0) {
|
|
||||||
subProject.logger.info "Disabling test execution for testing task ${task.getPath()}"
|
|
||||||
excludeTestsMatching "*"
|
|
||||||
}
|
|
||||||
|
|
||||||
List<String> intersection = executedTests.stream()
|
|
||||||
.filter(includes.&contains)
|
|
||||||
.collect(Collectors.toList())
|
|
||||||
subProject.logger.info "got ${intersection.size()} tests in intersection"
|
|
||||||
subProject.logger.info "INTERSECTION: ${intersection.toString()} "
|
|
||||||
includes.removeAll(intersection)
|
|
||||||
|
|
||||||
intersection.forEach { exclude ->
|
|
||||||
subProject.logger.info "excluding: $exclude for testing task ${task.getPath()}"
|
|
||||||
excludeTestsMatching exclude
|
|
||||||
}
|
|
||||||
includes.forEach { include ->
|
|
||||||
subProject.logger.info "including: $include for testing task ${task.getPath()}"
|
|
||||||
includeTestsMatching include
|
|
||||||
}
|
|
||||||
failOnNoMatchingTests false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
afterTest { desc, result ->
|
|
||||||
if (result.getResultType() == TestResult.ResultType.SUCCESS ) {
|
|
||||||
executedTestsFile.withWriterAppend { writer ->
|
|
||||||
writer.writeLine(desc.getClassName() + "." + desc.getName())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return task
|
|
||||||
}
|
|
||||||
|
|
||||||
private static void ensureImagePluginIsApplied(Project project) {
|
|
||||||
project.plugins.apply(ImageBuilding)
|
|
||||||
}
|
|
||||||
|
|
||||||
private Task createTestListingTasks(Test task, Project subProject) {
|
|
||||||
def taskName = task.getName()
|
|
||||||
def capitalizedTaskName = task.getName().capitalize()
|
|
||||||
//determine all the tests which are present in this test task.
|
|
||||||
//this list will then be shared between the various worker forks
|
|
||||||
ListTests createdListTask = subProject.tasks.create("listTestsFor" + capitalizedTaskName, ListTests) {
|
|
||||||
group = GRADLE_GROUP
|
|
||||||
//the convention is that a testing task is backed by a sourceSet with the same name
|
|
||||||
dependsOn subProject.getTasks().getByName("${taskName}Classes")
|
|
||||||
doFirst {
|
|
||||||
//we want to set the test scanning classpath to only the output of the sourceSet - this prevents dependencies polluting the list
|
|
||||||
scanClassPath = task.getTestClassesDirs() ? task.getTestClassesDirs() : Collections.emptyList()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
//convenience task to utilize the output of the test listing task to display to local console, useful for debugging missing tests
|
|
||||||
def createdPrintTask = subProject.tasks.create("printTestsFor" + capitalizedTaskName) {
|
|
||||||
group = GRADLE_GROUP
|
|
||||||
dependsOn createdListTask
|
|
||||||
doLast {
|
|
||||||
createdListTask.getTestsForFork(
|
|
||||||
getPropertyAsInt(subProject, "dockerFork", 0),
|
|
||||||
getPropertyAsInt(subProject, "dockerForks", 1),
|
|
||||||
42).forEach { testName ->
|
|
||||||
println testName
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
subProject.logger.info("created task: " + createdListTask.getPath() + " in project: " + subProject + " it dependsOn: " + createdListTask.dependsOn)
|
|
||||||
subProject.logger.info("created task: " + createdPrintTask.getPath() + " in project: " + subProject + " it dependsOn: " + createdPrintTask.dependsOn)
|
|
||||||
|
|
||||||
return createdListTask
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
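One subtle detail in the plugin above: preAllocateFor* and deAllocateFor* never share state; they can only find the same pods because both derive the pod-name prefix deterministically from the docker tag and group name. A sketch of that derivation (the same expression as in the plugin; class and method names here are illustrative):

import java.math.BigInteger;
import java.util.Random;

class PodPrefixSketch {
    // Same seed on both sides => same prefix => deAllocate can tear down what preAllocate booked.
    static String podPrefix(String dockerTag, String groupName) {
        int seed = dockerTag.hashCode() + groupName.hashCode();
        return new BigInteger(64, new Random(seed)).toString(36);
    }
}

This is also why both tasks refuse to run without a stable, user-supplied docker tag: a random tag would produce a different seed on each invocation.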
@ -1,161 +0,0 @@
|
|||||||
package net.corda.testing;
|
|
||||||
|
|
||||||
import com.bmuschko.gradle.docker.DockerRegistryCredentials;
|
|
||||||
import com.bmuschko.gradle.docker.tasks.container.DockerCreateContainer;
|
|
||||||
import com.bmuschko.gradle.docker.tasks.container.DockerLogsContainer;
|
|
||||||
import com.bmuschko.gradle.docker.tasks.container.DockerRemoveContainer;
|
|
||||||
import com.bmuschko.gradle.docker.tasks.container.DockerStartContainer;
|
|
||||||
import com.bmuschko.gradle.docker.tasks.container.DockerWaitContainer;
|
|
||||||
import com.bmuschko.gradle.docker.tasks.image.DockerBuildImage;
|
|
||||||
import com.bmuschko.gradle.docker.tasks.image.DockerCommitImage;
|
|
||||||
import com.bmuschko.gradle.docker.tasks.image.DockerPullImage;
|
|
||||||
import com.bmuschko.gradle.docker.tasks.image.DockerPushImage;
|
|
||||||
import com.bmuschko.gradle.docker.tasks.image.DockerRemoveImage;
|
|
||||||
import com.bmuschko.gradle.docker.tasks.image.DockerTagImage;
|
|
||||||
import org.gradle.api.GradleException;
|
|
||||||
import org.gradle.api.Plugin;
|
|
||||||
import org.gradle.api.Project;
|
|
||||||
import org.jetbrains.annotations.NotNull;
|
|
||||||
|
|
||||||
import java.io.File;
|
|
||||||
import java.util.Arrays;
|
|
||||||
import java.util.HashMap;
|
|
||||||
import java.util.Map;
|
|
||||||
import java.util.UUID;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* this plugin is responsible for setting up all the required docker image building tasks required for producing and pushing an
|
|
||||||
* image of the current build output to a remote container registry
|
|
||||||
*/
|
|
||||||
public class ImageBuilding implements Plugin<Project> {
|
|
||||||
|
|
||||||
public static final String registryName = "stefanotestingcr.azurecr.io/testing";
|
|
||||||
public static final String PROVIDE_TAG_FOR_BUILDING_PROPERTY = "docker.build.tag";
|
|
||||||
public static final String PROVIDE_TAG_FOR_RUNNING_PROPERTY = "docker.run.tag";
|
|
||||||
public DockerPushImage pushTask;
|
|
||||||
public DockerBuildImage buildTask;
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public void apply(@NotNull final Project project) {
|
|
||||||
|
|
||||||
final DockerRegistryCredentials registryCredentialsForPush = new DockerRegistryCredentials(project.getObjects());
|
|
||||||
registryCredentialsForPush.getUsername().set("stefanotestingcr");
|
|
||||||
registryCredentialsForPush.getPassword().set(System.getProperty("docker.push.password", ""));
|
|
||||||
|
|
||||||
final DockerPullImage pullTask = project.getTasks().create("pullBaseImage", DockerPullImage.class, dockerPullImage -> {
|
|
||||||
dockerPullImage.doFirst(task -> dockerPullImage.setRegistryCredentials(registryCredentialsForPush));
|
|
||||||
dockerPullImage.getRepository().set("stefanotestingcr.azurecr.io/buildbase");
|
|
||||||
dockerPullImage.getTag().set("latest");
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
final DockerBuildImage buildDockerImageForSource = project.getTasks().create("buildDockerImageForSource", DockerBuildImage.class,
|
|
||||||
dockerBuildImage -> {
|
|
||||||
dockerBuildImage.dependsOn(Arrays.asList(project.getRootProject().getTasksByName("clean", true), pullTask));
|
|
||||||
dockerBuildImage.getInputDir().set(new File("."));
|
|
||||||
dockerBuildImage.getDockerFile().set(new File(new File("testing"), "Dockerfile"));
|
|
||||||
});
|
|
||||||
|
|
||||||
this.buildTask = buildDockerImageForSource;
|
|
||||||
|
|
||||||
final DockerCreateContainer createBuildContainer = project.getTasks().create("createBuildContainer", DockerCreateContainer.class,
|
|
||||||
dockerCreateContainer -> {
|
|
||||||
final File baseWorkingDir = new File(System.getProperty("docker.work.dir") != null &&
|
|
||||||
!System.getProperty("docker.work.dir").isEmpty() ?
|
|
||||||
System.getProperty("docker.work.dir") : System.getProperty("java.io.tmpdir"));
|
|
||||||
final File gradleDir = new File(baseWorkingDir, "gradle");
|
|
||||||
final File mavenDir = new File(baseWorkingDir, "maven");
|
|
||||||
dockerCreateContainer.doFirst(task -> {
|
|
||||||
if (!gradleDir.exists()) {
|
|
||||||
gradleDir.mkdirs();
|
|
||||||
}
|
|
||||||
if (!mavenDir.exists()) {
|
|
||||||
mavenDir.mkdirs();
|
|
||||||
}
|
|
||||||
|
|
||||||
project.getLogger().info("Will use: " + gradleDir.getAbsolutePath() + " for caching gradle artifacts");
|
|
||||||
});
|
|
||||||
dockerCreateContainer.dependsOn(buildDockerImageForSource);
|
|
||||||
dockerCreateContainer.targetImageId(buildDockerImageForSource.getImageId());
|
|
||||||
final Map<String, String> map = new HashMap<>();
|
|
||||||
map.put(gradleDir.getAbsolutePath(), "/tmp/gradle");
|
|
||||||
map.put(mavenDir.getAbsolutePath(), "/home/root/.m2");
|
|
||||||
dockerCreateContainer.getBinds().set(map);
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
final DockerStartContainer startBuildContainer = project.getTasks().create("startBuildContainer", DockerStartContainer.class,
|
|
||||||
dockerStartContainer -> {
|
|
||||||
dockerStartContainer.dependsOn(createBuildContainer);
|
|
||||||
dockerStartContainer.targetContainerId(createBuildContainer.getContainerId());
|
|
||||||
});
|
|
||||||
|
|
||||||
final DockerLogsContainer logBuildContainer = project.getTasks().create("logBuildContainer", DockerLogsContainer.class,
|
|
||||||
dockerLogsContainer -> {
|
|
||||||
dockerLogsContainer.dependsOn(startBuildContainer);
|
|
||||||
dockerLogsContainer.targetContainerId(createBuildContainer.getContainerId());
|
|
||||||
dockerLogsContainer.getFollow().set(true);
|
|
||||||
});
|
|
||||||
|
|
||||||
final DockerWaitContainer waitForBuildContainer = project.getTasks().create("waitForBuildContainer", DockerWaitContainer.class,
|
|
||||||
dockerWaitContainer -> {
|
|
||||||
dockerWaitContainer.dependsOn(logBuildContainer);
|
|
||||||
dockerWaitContainer.targetContainerId(createBuildContainer.getContainerId());
|
|
||||||
dockerWaitContainer.doLast(task -> {
|
|
||||||
if (dockerWaitContainer.getExitCode() != 0) {
|
|
||||||
throw new GradleException("Failed to build docker image, aborting build");
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
final DockerCommitImage commitBuildImageResult = project.getTasks().create("commitBuildImageResult", DockerCommitImage.class,
|
|
||||||
dockerCommitImage -> {
|
|
||||||
dockerCommitImage.dependsOn(waitForBuildContainer);
|
|
||||||
dockerCommitImage.targetContainerId(createBuildContainer.getContainerId());
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
final DockerTagImage tagBuildImageResult = project.getTasks().create("tagBuildImageResult", DockerTagImage.class, dockerTagImage -> {
|
|
||||||
dockerTagImage.dependsOn(commitBuildImageResult);
|
|
||||||
dockerTagImage.getImageId().set(commitBuildImageResult.getImageId());
|
|
||||||
dockerTagImage.getTag().set(System.getProperty(PROVIDE_TAG_FOR_BUILDING_PROPERTY, UUID.randomUUID().toString().toLowerCase().substring(0, 12)));
|
|
||||||
dockerTagImage.getRepository().set(registryName);
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
final DockerPushImage pushBuildImage = project.getTasks().create("pushBuildImage", DockerPushImage.class, dockerPushImage -> {
|
|
||||||
dockerPushImage.dependsOn(tagBuildImageResult);
|
|
||||||
dockerPushImage.doFirst(task -> dockerPushImage.setRegistryCredentials(registryCredentialsForPush));
|
|
||||||
dockerPushImage.getImageName().set(registryName);
|
|
||||||
dockerPushImage.getTag().set(tagBuildImageResult.getTag());
|
|
||||||
});
|
|
||||||
|
|
||||||
this.pushTask = pushBuildImage;
|
|
||||||
|
|
||||||
|
|
||||||
final DockerRemoveContainer deleteContainer = project.getTasks().create("deleteBuildContainer", DockerRemoveContainer.class,
|
|
||||||
dockerRemoveContainer -> {
|
|
||||||
dockerRemoveContainer.dependsOn(pushBuildImage);
|
|
||||||
dockerRemoveContainer.targetContainerId(createBuildContainer.getContainerId());
|
|
||||||
});
|
|
||||||
|
|
||||||
|
|
||||||
final DockerRemoveImage deleteTaggedImage = project.getTasks().create("deleteTaggedImage", DockerRemoveImage.class,
|
|
||||||
dockerRemoveImage -> {
|
|
||||||
dockerRemoveImage.dependsOn(pushBuildImage);
|
|
||||||
dockerRemoveImage.getForce().set(true);
|
|
||||||
dockerRemoveImage.targetImageId(commitBuildImageResult.getImageId());
|
|
||||||
});
|
|
||||||
|
|
||||||
final DockerRemoveImage deleteBuildImage = project.getTasks().create("deleteBuildImage", DockerRemoveImage.class,
|
|
||||||
dockerRemoveImage -> {
|
|
||||||
dockerRemoveImage.dependsOn(deleteContainer, deleteTaggedImage);
|
|
||||||
dockerRemoveImage.getForce().set(true);
|
|
||||||
dockerRemoveImage.targetImageId(buildDockerImageForSource.getImageId());
|
|
||||||
});
|
|
||||||
|
|
||||||
if (System.getProperty("docker.keep.image") == null) {
|
|
||||||
pushBuildImage.finalizedBy(deleteContainer, deleteBuildImage, deleteTaggedImage);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
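The tasks above form a single linear chain via dependsOn: pullBaseImage, buildDockerImageForSource, createBuildContainer, startBuildContainer, logBuildContainer, waitForBuildContainer, commitBuildImageResult, tagBuildImageResult, pushBuildImage, with the delete* tasks wired in as finalizers of the push unless -Ddocker.keep.image is set. A hypothetical invocation, using only the system properties that appear in the code:

./gradlew pushBuildImage -Ddocker.build.tag=my-stable-tag -Ddocker.push.password=<secret> -Ddocker.keep.image=true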
@ -1,589 +0,0 @@
|
|||||||
package net.corda.testing;
|
|
||||||
|
|
||||||
import io.fabric8.kubernetes.api.model.ContainerFluent;
|
|
||||||
import io.fabric8.kubernetes.api.model.DoneablePod;
|
|
||||||
import io.fabric8.kubernetes.api.model.PersistentVolumeClaim;
|
|
||||||
import io.fabric8.kubernetes.api.model.Pod;
|
|
||||||
import io.fabric8.kubernetes.api.model.PodBuilder;
|
|
||||||
import io.fabric8.kubernetes.api.model.PodFluent;
|
|
||||||
import io.fabric8.kubernetes.api.model.PodSpecFluent;
|
|
||||||
import io.fabric8.kubernetes.api.model.Quantity;
|
|
||||||
import io.fabric8.kubernetes.api.model.Status;
|
|
||||||
import io.fabric8.kubernetes.api.model.StatusCause;
|
|
||||||
import io.fabric8.kubernetes.api.model.StatusDetails;
|
|
||||||
import io.fabric8.kubernetes.api.model.TolerationBuilder;
|
|
||||||
import io.fabric8.kubernetes.client.DefaultKubernetesClient;
|
|
||||||
import io.fabric8.kubernetes.client.KubernetesClient;
|
|
||||||
import io.fabric8.kubernetes.client.KubernetesClientException;
|
|
||||||
import io.fabric8.kubernetes.client.Watch;
|
|
||||||
import io.fabric8.kubernetes.client.Watcher;
|
|
||||||
import io.fabric8.kubernetes.client.dsl.ExecListener;
|
|
||||||
import io.fabric8.kubernetes.client.dsl.PodResource;
|
|
||||||
import io.fabric8.kubernetes.client.utils.Serialization;
|
|
||||||
import net.corda.testing.retry.Retry;
|
|
||||||
import okhttp3.Response;
|
|
||||||
import org.gradle.api.DefaultTask;
|
|
||||||
import org.gradle.api.tasks.TaskAction;
|
|
||||||
import org.jetbrains.annotations.NotNull;
|
|
||||||
|
|
||||||
import java.io.BufferedReader;
|
|
||||||
import java.io.BufferedWriter;
|
|
||||||
import java.io.ByteArrayOutputStream;
|
|
||||||
import java.io.File;
|
|
||||||
import java.io.FileWriter;
|
|
||||||
import java.io.IOException;
|
|
||||||
import java.io.InputStream;
|
|
||||||
import java.io.InputStreamReader;
|
|
||||||
import java.io.PipedInputStream;
|
|
||||||
import java.io.PipedOutputStream;
|
|
||||||
import java.io.RandomAccessFile;
|
|
||||||
import java.math.BigInteger;
|
|
||||||
import java.nio.channels.FileChannel;
|
|
||||||
import java.nio.channels.FileLock;
|
|
||||||
import java.nio.file.Path;
|
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.Arrays;
|
|
||||||
import java.util.Collection;
|
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.HashSet;
|
|
||||||
import java.util.LinkedList;
|
|
||||||
import java.util.List;
|
|
||||||
import java.util.Optional;
|
|
||||||
import java.util.Queue;
|
|
||||||
import java.util.Random;
|
|
||||||
import java.util.Set;
|
|
||||||
import java.util.concurrent.CompletableFuture;
|
|
||||||
import java.util.concurrent.ExecutionException;
|
|
||||||
import java.util.concurrent.ExecutorService;
|
|
||||||
import java.util.concurrent.Executors;
|
|
||||||
import java.util.concurrent.Future;
|
|
||||||
import java.util.concurrent.TimeUnit;
|
|
||||||
import java.util.concurrent.atomic.AtomicInteger;
|
|
||||||
import java.util.stream.Collectors;
|
|
||||||
import java.util.stream.IntStream;
|
|
||||||
|
|
||||||
public class KubesTest extends DefaultTask {
|
|
||||||
|
|
||||||
static final String TEST_RUN_DIR = "/test-runs";
|
|
||||||
private static final ExecutorService executorService = Executors.newCachedThreadPool();
|
|
||||||
/**
|
|
||||||
* Name of the k8s Secret object that holds the credentials to access the docker image registry
|
|
||||||
*/
|
|
||||||
private static final String REGISTRY_CREDENTIALS_SECRET_NAME = "regcred";
|
|
||||||
|
|
||||||
private static int DEFAULT_K8S_TIMEOUT_VALUE_MILLIES = 60 * 1_000;
|
|
||||||
private static int DEFAULT_K8S_WEBSOCKET_TIMEOUT = DEFAULT_K8S_TIMEOUT_VALUE_MILLIES * 30;
|
|
||||||
private static int DEFAULT_POD_ALLOCATION_TIMEOUT = 60;
|
|
||||||
|
|
||||||
String dockerTag;
|
|
||||||
String fullTaskToExecutePath;
|
|
||||||
String taskToExecuteName;
|
|
||||||
String sidecarImage;
|
|
||||||
Boolean printOutput = false;
|
|
||||||
List<String> additionalArgs;
|
|
||||||
List<String> taints = Collections.emptyList();
|
|
||||||
|
|
||||||
Integer numberOfCoresPerFork = 4;
|
|
||||||
Integer memoryGbPerFork = 6;
|
|
||||||
public volatile List<File> testOutput = Collections.emptyList();
|
|
||||||
public volatile List<KubePodResult> containerResults = Collections.emptyList();
|
|
||||||
private final Set<String> remainingPods = Collections.synchronizedSet(new HashSet<>());
|
|
||||||
|
|
||||||
public static String NAMESPACE = "thisisatest";
|
|
||||||
|
|
||||||
int numberOfPods = 5;
|
|
||||||
|
|
||||||
DistributeTestsBy distribution = DistributeTestsBy.METHOD;
|
|
||||||
PodLogLevel podLogLevel = PodLogLevel.INFO;
|
|
||||||
|
|
||||||
@TaskAction
|
|
||||||
public void runDistributedTests() {
|
|
||||||
String buildId = System.getProperty("buildId", "0");
|
|
||||||
String currentUser = System.getProperty("user.name", "UNKNOWN_USER");
|
|
||||||
|
|
||||||
String stableRunId = rnd64Base36(new Random(buildId.hashCode() + currentUser.hashCode() + taskToExecuteName.hashCode()));
|
|
||||||
String random = rnd64Base36(new Random());
|
|
||||||
|
|
||||||
try (KubernetesClient client = getKubernetesClient()) {
|
|
||||||
client.pods().inNamespace(NAMESPACE).list().getItems().forEach(podToDelete -> {
|
|
||||||
if (podToDelete.getMetadata().getName().contains(stableRunId)) {
|
|
||||||
getProject().getLogger().lifecycle("deleting: " + podToDelete.getMetadata().getName());
|
|
||||||
client.resource(podToDelete).delete();
|
|
||||||
}
|
|
||||||
});
|
|
||||||
} catch (Exception ignored) {
|
|
||||||
//it's possible that a pod is being deleted by the original build, this can lead to racey conditions
|
|
||||||
}
|
|
||||||
|
|
||||||
List<Future<KubePodResult>> futures = IntStream.range(0, numberOfPods).mapToObj(i -> {
|
|
||||||
String podName = generatePodName(stableRunId, random, i);
|
|
||||||
return submitBuild(NAMESPACE, numberOfPods, i, podName, printOutput, 3);
|
|
||||||
}).collect(Collectors.toList());
|
|
||||||
|
|
||||||
this.testOutput = Collections.synchronizedList(futures.stream().map(it -> {
|
|
||||||
try {
|
|
||||||
return it.get().getBinaryResults();
|
|
||||||
} catch (InterruptedException | ExecutionException e) {
|
|
||||||
throw new RuntimeException(e);
|
|
||||||
}
|
|
||||||
}).flatMap(Collection::stream).collect(Collectors.toList()));
|
|
||||||
this.containerResults = futures.stream().map(it -> {
|
|
||||||
try {
|
|
||||||
return it.get();
|
|
||||||
} catch (InterruptedException | ExecutionException e) {
|
|
||||||
throw new RuntimeException(e);
|
|
||||||
}
|
|
||||||
}).collect(Collectors.toList());
|
|
||||||
}
|
|
||||||
|
|
||||||
@NotNull
|
|
||||||
private String generatePodName(String stableRunId, String random, int i) {
|
|
||||||
int magicMaxLength = 63;
|
|
||||||
String provisionalName = taskToExecuteName.toLowerCase() + "-" + stableRunId + "-" + random + "-" + i;
|
|
||||||
//length = 100
|
|
||||||
//100-63 = 37
|
|
||||||
//subString(37, 100) -? string of 63 characters
|
|
||||||
return provisionalName.substring(Math.max(provisionalName.length() - magicMaxLength, 0));
|
|
||||||
}
|
|
||||||

    @NotNull
    private synchronized KubernetesClient getKubernetesClient() {

        try (RandomAccessFile file = new RandomAccessFile("/tmp/refresh.lock", "rw");
             FileChannel c = file.getChannel();
             FileLock lock = c.lock()) {

            getProject().getLogger().quiet("Invoking kubectl to attempt to refresh token");
            ProcessBuilder tokenRefreshCommand = new ProcessBuilder().command("kubectl", "auth", "can-i", "get", "pods");
            Process refreshProcess = tokenRefreshCommand.start();
            int resultCodeOfRefresh = refreshProcess.waitFor();
            getProject().getLogger().quiet("Completed Token refresh");

            if (resultCodeOfRefresh != 0) {
                throw new RuntimeException("Failed to invoke kubectl to refresh tokens");
            }

        } catch (InterruptedException | IOException e) {
            throw new RuntimeException(e);
        }

        io.fabric8.kubernetes.client.Config config = new io.fabric8.kubernetes.client.ConfigBuilder()
                .withConnectionTimeout(DEFAULT_K8S_TIMEOUT_VALUE_MILLIES)
                .withRequestTimeout(DEFAULT_K8S_TIMEOUT_VALUE_MILLIES)
                .withRollingTimeout(DEFAULT_K8S_TIMEOUT_VALUE_MILLIES)
                .withWebsocketTimeout(DEFAULT_K8S_WEBSOCKET_TIMEOUT)
                .withWebsocketPingInterval(DEFAULT_K8S_WEBSOCKET_TIMEOUT)
                .build();

        return new DefaultKubernetesClient(config);
    }

    private static String rnd64Base36(Random rnd) {
        return new BigInteger(64, rnd)
                .toString(36)
                .toLowerCase();
    }

    private CompletableFuture<KubePodResult> submitBuild(
            String namespace,
            int numberOfPods,
            int podIdx,
            String podName,
            boolean printOutput,
            int numberOfRetries
    ) {
        return CompletableFuture.supplyAsync(() -> {
            PersistentVolumeClaim pvc = createPvc(podName);
            return buildRunPodWithRetriesOrThrow(namespace, numberOfPods, podIdx, podName, printOutput, numberOfRetries, pvc);
        }, executorService);
    }

    private static void addShutdownHook(Runnable hook) {
        Runtime.getRuntime().addShutdownHook(new Thread(hook));
    }

    private PersistentVolumeClaim createPvc(String name) {
        PersistentVolumeClaim pvc;
        try (KubernetesClient client = getKubernetesClient()) {
            pvc = client.persistentVolumeClaims()
                    .inNamespace(NAMESPACE)
                    .createNew()
                    .editOrNewMetadata().withName(name).endMetadata()
                    .editOrNewSpec()
                    .withAccessModes("ReadWriteOnce")
                    .editOrNewResources().addToRequests("storage", new Quantity("100Mi")).endResources()
                    .withStorageClassName("testing-storage")
                    .endSpec()
                    .done();
        }

        addShutdownHook(() -> {
            try (KubernetesClient client = getKubernetesClient()) {
                System.out.println("Deleting PVC: " + pvc.getMetadata().getName());
                client.persistentVolumeClaims().delete(pvc);
            }
        });
        return pvc;
    }

    private KubePodResult buildRunPodWithRetriesOrThrow(
            String namespace,
            int numberOfPods,
            int podIdx,
            String podName,
            boolean printOutput,
            int numberOfRetries,
            PersistentVolumeClaim pvc) {
        addShutdownHook(() -> {
            System.out.println("deleting pod: " + podName);
            try (KubernetesClient client = getKubernetesClient()) {
                client.pods().inNamespace(namespace).withName(podName).delete();
            }
        });

        int podNumber = podIdx + 1;
        final AtomicInteger testRetries = new AtomicInteger(0);
        try {
            // pods might die, so we retry
            return Retry.fixed(numberOfRetries).run(() -> {
                // remove the pod if it already exists
                Pod createdPod;
                try (KubernetesClient client = getKubernetesClient()) {
                    PodResource<Pod, DoneablePod> oldPod = client.pods().inNamespace(namespace).withName(podName);
                    if (oldPod.get() != null) {
                        getLogger().lifecycle("deleting pod: {}", podName);
                        oldPod.delete();
                        while (oldPod.get() != null) {
                            getLogger().info("waiting for pod {} to be removed", podName);
                            Thread.sleep(1000);
                        }
                    }
                    getProject().getLogger().lifecycle("creating pod: " + podName);
                    createdPod = client.pods().inNamespace(namespace).create(buildPodRequest(podName, pvc, sidecarImage != null));
                    remainingPods.add(podName);
                    getProject().getLogger().lifecycle("scheduled pod: " + podName);
                }

                attachStatusListenerToPod(createdPod);
                waitForPodToStart(createdPod);

                PipedOutputStream stdOutOs = new PipedOutputStream();
                PipedInputStream stdOutIs = new PipedInputStream(4096);
                ByteArrayOutputStream errChannelStream = new ByteArrayOutputStream();

                CompletableFuture<Integer> waiter = new CompletableFuture<>();
                File podLogsDirectory = new File(getProject().getBuildDir(), "pod-logs");
                if (!podLogsDirectory.exists()) {
                    podLogsDirectory.mkdirs();
                }

                File podOutput = executeBuild(namespace, numberOfPods, podIdx, podName, podLogsDirectory, printOutput, stdOutOs, stdOutIs, errChannelStream, waiter);
                int resCode = waiter.join();
                getProject().getLogger().lifecycle("build has ended on pod " + podName + " (" + podNumber + "/" + numberOfPods + ") with result " + resCode + ", gathering results");
                Collection<File> binaryResults;
                // we don't retry on the final attempt as this would crash the build and some pods might not get to finish
                if (resCode != 0 && testRetries.getAndIncrement() < numberOfRetries - 1) {
                    downloadTestXmlFromPod(namespace, createdPod);
                    getProject().getLogger().lifecycle("There are test failures in this pod. Retrying failed tests!!!");
                    throw new RuntimeException("There are test failures in this pod");
                } else {
                    binaryResults = downloadTestXmlFromPod(namespace, createdPod);
                }

                getLogger().lifecycle("removing pod " + podName + " (" + podNumber + "/" + numberOfPods + ") after completed build");

                try (KubernetesClient client = getKubernetesClient()) {
                    client.pods().delete(createdPod);
                    client.persistentVolumeClaims().delete(pvc);
                    synchronized (remainingPods) {
                        remainingPods.remove(podName);
                        getLogger().lifecycle("Remaining Pods: ");
                        remainingPods.forEach(pod -> getLogger().lifecycle("\t" + pod));
                    }
                }
                return new KubePodResult(podIdx, resCode, podOutput, binaryResults);
            });
        } catch (Retry.RetryException e) {
            Pod pod = getKubernetesClient().pods().inNamespace(namespace).create(buildPodRequest(podName, pvc, sidecarImage != null));
            downloadTestXmlFromPod(namespace, pod);
            throw new RuntimeException("Failed to build in pod " + podName + " (" + podNumber + "/" + numberOfPods + ") in " + numberOfRetries + " attempts", e);
        }
    }

    @NotNull
    private File executeBuild(String namespace,
                              int numberOfPods,
                              int podIdx,
                              String podName,
                              File podLogsDirectory,
                              boolean printOutput,
                              PipedOutputStream stdOutOs,
                              PipedInputStream stdOutIs,
                              ByteArrayOutputStream errChannelStream,
                              CompletableFuture<Integer> waiter) throws IOException {
        KubernetesClient client = getKubernetesClient();
        ExecListener execListener = buildExecListenerForPod(podName, errChannelStream, waiter, client);
        stdOutIs.connect(stdOutOs);

        String[] buildCommand = getBuildCommand(numberOfPods, podIdx);
        getProject().getLogger().quiet("About to execute " + Arrays.stream(buildCommand).reduce("", (s, s2) -> s + " " + s2) + " on pod " + podName);
        client.pods().inNamespace(namespace).withName(podName)
                .inContainer(podName)
                .writingOutput(stdOutOs)
                .writingErrorChannel(errChannelStream)
                .usingListener(execListener)
                .exec(buildCommand);

        return startLogPumping(stdOutIs, podIdx, podLogsDirectory, printOutput);
    }

    private Pod buildPodRequest(String podName, PersistentVolumeClaim pvc, boolean withDb) {
        if (withDb) {
            return buildPodRequestWithWorkerNodeAndDbContainer(podName, pvc);
        } else {
            return buildPodRequestWithOnlyWorkerNode(podName, pvc);
        }
    }

    private Pod buildPodRequestWithOnlyWorkerNode(String podName, PersistentVolumeClaim pvc) {
        return getBasePodDefinition(podName, pvc)
                .addToRequests("cpu", new Quantity(numberOfCoresPerFork.toString()))
                .addToRequests("memory", new Quantity(memoryGbPerFork.toString()))
                .endResources()
                .addNewVolumeMount().withName("gradlecache").withMountPath("/tmp/gradle").endVolumeMount()
                .addNewVolumeMount().withName("testruns").withMountPath(TEST_RUN_DIR).endVolumeMount()
                .endContainer()
                .addNewImagePullSecret(REGISTRY_CREDENTIALS_SECRET_NAME)
                .withRestartPolicy("Never")
                .endSpec()
                .build();
    }

    private Pod buildPodRequestWithWorkerNodeAndDbContainer(String podName, PersistentVolumeClaim pvc) {
        return getBasePodDefinition(podName, pvc)
                .addToRequests("cpu", new Quantity(Integer.valueOf(numberOfCoresPerFork - 1).toString()))
                .addToRequests("memory", new Quantity(Integer.valueOf(memoryGbPerFork - 1).toString() + "Gi"))
                .endResources()
                .addNewVolumeMount().withName("gradlecache").withMountPath("/tmp/gradle").endVolumeMount()
                .addNewVolumeMount().withName("testruns").withMountPath(TEST_RUN_DIR).endVolumeMount()
                .endContainer()
                .addNewContainer()
                .withImage(sidecarImage)
                .addNewEnv()
                .withName("DRIVER_NODE_MEMORY")
                .withValue("1024m")
                .withName("DRIVER_WEB_MEMORY")
                .withValue("1024m")
                .endEnv()
                .withName(podName + "-pg")
                .withNewResources()
                .addToRequests("cpu", new Quantity("1"))
                .addToRequests("memory", new Quantity("1Gi"))
                .endResources()
                .endContainer()

                .addNewImagePullSecret(REGISTRY_CREDENTIALS_SECRET_NAME)
                .withRestartPolicy("Never")
                .endSpec()
                .build();
    }

    private ContainerFluent.ResourcesNested<PodSpecFluent.ContainersNested<PodFluent.SpecNested<PodBuilder>>> getBasePodDefinition(String podName, PersistentVolumeClaim pvc) {
        return new PodBuilder()
                .withNewMetadata().withName(podName).endMetadata()
                .withNewSpec()

                .addNewVolume()
                .withName("gradlecache")
                .withNewHostPath()
                .withType("DirectoryOrCreate")
                .withPath("/tmp/gradle")
                .endHostPath()
                .endVolume()
                .addNewVolume()
                .withName("testruns")
                .withNewPersistentVolumeClaim()
                .withClaimName(pvc.getMetadata().getName())
                .endPersistentVolumeClaim()
                .endVolume()
                .withTolerations(taints.stream().map(taint -> new TolerationBuilder().withKey("key").withValue(taint).withOperator("Equal").withEffect("NoSchedule").build()).collect(Collectors.toList()))
                .addNewContainer()
                .withImage(dockerTag)
                .withCommand("bash")
                .withArgs("-c", "sleep 3600")
                .addNewEnv()
                .withName("DRIVER_NODE_MEMORY")
                .withValue("1024m")
                .withName("DRIVER_WEB_MEMORY")
                .withValue("1024m")
                .endEnv()
                .withName(podName)
                .withNewResources();
    }

    private File startLogPumping(InputStream stdOutIs, int podIdx, File podLogsDirectory, boolean printOutput) throws IOException {
        File outputDir = new File(podLogsDirectory, taskToExecuteName);
        outputDir.mkdirs();
        File outputFile = new File(outputDir, "container-" + podIdx + ".log");
        outputFile.createNewFile();
        Thread loggingThread = new Thread(() -> {
            try (BufferedWriter out = new BufferedWriter(new FileWriter(outputFile, true));
                 BufferedReader br = new BufferedReader(new InputStreamReader(stdOutIs))) {
                String line;
                while ((line = br.readLine()) != null) {
                    String toWrite = ("Container" + podIdx + ": " + line).trim();
                    if (printOutput) {
                        getProject().getLogger().lifecycle(toWrite);
                    }
                    out.write(line);
                    out.newLine();
                }
            } catch (IOException ignored) {
            }
        });

        loggingThread.setDaemon(true);
        loggingThread.start();
        return outputFile;
    }

    private Watch attachStatusListenerToPod(Pod pod) {
        KubernetesClient client = getKubernetesClient();
        return client.pods().inNamespace(pod.getMetadata().getNamespace()).withName(pod.getMetadata().getName()).watch(new Watcher<Pod>() {
            @Override
            public void eventReceived(Watcher.Action action, Pod resource) {
                getProject().getLogger().lifecycle("[StatusChange] pod " + resource.getMetadata().getName() + " " + action.name() + " (" + resource.getStatus().getPhase() + ")");
            }

            @Override
            public void onClose(KubernetesClientException cause) {
                client.close();
            }
        });
    }

    private void waitForPodToStart(Pod pod) {
        try (KubernetesClient client = getKubernetesClient()) {
            getProject().getLogger().lifecycle("Waiting for pod " + pod.getMetadata().getName() + " to start before executing build");
            try {
                client.pods().inNamespace(pod.getMetadata().getNamespace()).withName(pod.getMetadata().getName()).waitUntilReady(DEFAULT_POD_ALLOCATION_TIMEOUT, TimeUnit.MINUTES);
            } catch (InterruptedException e) {
                throw new RuntimeException(e);
            }
            getProject().getLogger().lifecycle("pod " + pod.getMetadata().getName() + " has started, executing build");
        }
    }

    private Collection<File> downloadTestXmlFromPod(String namespace, Pod cp) {
        String resultsInContainerPath = TEST_RUN_DIR + "/test-reports";
        String binaryResultsFile = "results.bin";
        String podName = cp.getMetadata().getName();
        Path tempDir = new File(new File(getProject().getBuildDir(), "test-results-xml"), podName).toPath();

        if (!tempDir.toFile().exists()) {
            tempDir.toFile().mkdirs();
        }
        getProject().getLogger().lifecycle("Saving " + podName + " results to: " + tempDir.toAbsolutePath().toFile().getAbsolutePath());
        try (KubernetesClient client = getKubernetesClient()) {
            client.pods()
                    .inNamespace(namespace)
                    .withName(podName)
                    .inContainer(podName)
                    .dir(resultsInContainerPath)
                    .copy(tempDir);
        }
        return findFolderContainingBinaryResultsFile(new File(tempDir.toFile().getAbsolutePath()), binaryResultsFile);
    }

    private String[] getBuildCommand(int numberOfPods, int podIdx) {
        final String gitBranch = " -Dgit.branch=" + Properties.getGitBranch();
        final String gitTargetBranch = " -Dgit.target.branch=" + Properties.getTargetGitBranch();
        final String artifactoryUsername = " -Dartifactory.username=" + Properties.getUsername() + " ";
        final String artifactoryPassword = " -Dartifactory.password=" + Properties.getPassword() + " ";
        final String additionalArgs = this.additionalArgs.isEmpty() ? "" : String.join(" ", this.additionalArgs);

        String shellScript = "(let x=1 ; while [ ${x} -ne 0 ] ; do echo \"Waiting for DNS\" ; curl services.gradle.org > /dev/null 2>&1 ; x=$? ; sleep 1 ; done ) && "
                + " cd /tmp/source && " +
                "(let y=1 ; while [ ${y} -ne 0 ] ; do echo \"Preparing build directory\" ; ./gradlew testClasses integrationTestClasses --parallel 2>&1 ; y=$? ; sleep 1 ; done ) && " +
                "(./gradlew -D" + ListTests.DISTRIBUTION_PROPERTY + "=" + distribution.name() +
                gitBranch +
                gitTargetBranch +
                artifactoryUsername +
                artifactoryPassword +
                "-Dkubenetize -PdockerFork=" + podIdx + " -PdockerForks=" + numberOfPods + " " + fullTaskToExecutePath + " " + additionalArgs + " " + getLoggingLevel() + " 2>&1) ; " +
                "let rs=$? ; sleep 10 ; exit ${rs}";
        return new String[]{"bash", "-c", shellScript};
    }

    private String getLoggingLevel() {

        switch (podLogLevel) {
            case INFO:
                return " --info";
            case WARN:
                return " --warn";
            case QUIET:
                return " --quiet";
            case DEBUG:
                return " --debug";
            default:
                throw new IllegalArgumentException("LogLevel: " + podLogLevel + " is unknown");
        }

    }

    private List<File> findFolderContainingBinaryResultsFile(File start, String fileNameToFind) {
        Queue<File> filesToInspect = new LinkedList<>(Collections.singletonList(start));
        List<File> folders = new ArrayList<>();
        while (!filesToInspect.isEmpty()) {
            File fileToInspect = filesToInspect.poll();
            if (fileToInspect.getAbsolutePath().endsWith(fileNameToFind)) {
                folders.add(fileToInspect.getParentFile());
            }

            if (fileToInspect.isDirectory()) {
                filesToInspect.addAll(Arrays.stream(Optional.ofNullable(fileToInspect.listFiles()).orElse(new File[]{})).collect(Collectors.toList()));
            }
        }
        return folders;
    }

    private ExecListener buildExecListenerForPod(String podName, ByteArrayOutputStream errChannelStream, CompletableFuture<Integer> waitingFuture, KubernetesClient client) {

        return new ExecListener() {
            final Long start = System.currentTimeMillis();

            @Override
            public void onOpen(Response response) {
                getProject().getLogger().lifecycle("Build started on pod " + podName);
            }

            @Override
            public void onFailure(Throwable t, Response response) {
                getProject().getLogger().lifecycle("Received error from pod " + podName);
                waitingFuture.completeExceptionally(t);
            }

            @Override
            public void onClose(int code, String reason) {
                getProject().getLogger().lifecycle("Received onClose() from pod " + podName + ", build took: " + ((System.currentTimeMillis() - start) / 1000) + " seconds");
                try {
                    String errChannelContents = errChannelStream.toString();
                    Status status = Serialization.unmarshal(errChannelContents, Status.class);
                    Integer resultCode = Optional.ofNullable(status).map(Status::getDetails)
                            .map(StatusDetails::getCauses)
                            .flatMap(c -> c.stream().findFirst())
                            .map(StatusCause::getMessage)
                            .map(Integer::parseInt).orElse(0);
                    waitingFuture.complete(resultCode);
                } catch (Exception e) {
                    waitingFuture.completeExceptionally(e);
                } finally {
                    client.close();
                }
            }
        };
    }

}
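The exec listener above recovers the build's exit code from the Kubernetes error channel, which delivers a serialized Status object when the exec'd command terminates; onClose() digs the code out of the first status cause. A minimal sketch of that contract, assuming the first cause message carries the exit code (the JSON payload is hand-written for illustration, not captured from a real cluster):

import io.fabric8.kubernetes.api.model.Status;
import io.fabric8.kubernetes.client.utils.Serialization;

class ErrChannelSketch {
    public static void main(String[] args) {
        // Hypothetical error-channel payload for a command that exited with code 1.
        String errChannel = "{\"kind\":\"Status\",\"status\":\"Failure\","
                + "\"details\":{\"causes\":[{\"reason\":\"ExitCode\",\"message\":\"1\"}]}}";
        Status status = Serialization.unmarshal(errChannel, Status.class);
        int exitCode = Integer.parseInt(status.getDetails().getCauses().get(0).getMessage());
        System.out.println(exitCode); // 1
    }
}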
ListShufflerAndAllocator.java
@@ -1,37 +0,0 @@
package net.corda.testing;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Random;
import java.util.stream.Collectors;

class ListShufflerAndAllocator {

    private final List<String> tests;

    public ListShufflerAndAllocator(List<String> tests) {
        this.tests = new ArrayList<>(tests);
    }

    public List<String> getTestsForFork(int fork, int forks, Integer seed) {
        final Random shuffler = new Random(seed);
        final List<String> copy = new ArrayList<>(tests);
        while (copy.size() < forks) {
            // pad the list
            copy.add(null);
        }
        Collections.shuffle(copy, shuffler);
        final int numberOfTestsPerFork = Math.max((copy.size() / forks), 1);
        final int consumedTests = numberOfTestsPerFork * forks;
        final int ourStartIdx = numberOfTestsPerFork * fork;
        final int ourEndIdx = ourStartIdx + numberOfTestsPerFork;
        final int ourSupplementaryIdx = consumedTests + fork;
        final ArrayList<String> toReturn = new ArrayList<>(copy.subList(ourStartIdx, ourEndIdx));
        if (ourSupplementaryIdx < copy.size()) {
            toReturn.add(copy.get(ourSupplementaryIdx));
        }
        return toReturn.stream().filter(Objects::nonNull).collect(Collectors.toList());
    }
}
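The allocator's contract is worth noting: with the same seed on every worker the shuffle is deterministic, so each test lands on exactly one fork and the union over all forks is the complete list. A small sketch (made-up test names; assumes it runs in the same package, since the class is package-private):

import java.util.*;

class AllocatorSketch {
    public static void main(String[] args) {
        List<String> tests = Arrays.asList("A", "B", "C", "D", "E");
        ListShufflerAndAllocator allocator = new ListShufflerAndAllocator(tests);
        int forks = 2;
        Set<String> union = new HashSet<>();
        for (int fork = 0; fork < forks; fork++) {
            // same seed on every worker -> repeatable, disjoint partition
            union.addAll(allocator.getTestsForFork(fork, forks, 42));
        }
        System.out.println(union.size()); // 5 - nothing dropped, nothing duplicated
    }
}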
ListTests.java
@@ -1,89 +0,0 @@
package net.corda.testing;

import io.github.classgraph.ClassGraph;
import io.github.classgraph.ClassInfo;
import io.github.classgraph.ClassInfoList;
import org.gradle.api.DefaultTask;
import org.gradle.api.file.FileCollection;
import org.gradle.api.tasks.TaskAction;
import org.jetbrains.annotations.NotNull;

import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;

interface TestLister {
    List<String> getAllTestsDiscovered();
}

public class ListTests extends DefaultTask implements TestLister {

    public static final String DISTRIBUTION_PROPERTY = "distributeBy";

    public FileCollection scanClassPath;
    private List<String> allTests;
    private DistributeTestsBy distribution = System.getProperty(DISTRIBUTION_PROPERTY) != null && !System.getProperty(DISTRIBUTION_PROPERTY).isEmpty() ?
            DistributeTestsBy.valueOf(System.getProperty(DISTRIBUTION_PROPERTY)) : DistributeTestsBy.METHOD;

    public List<String> getTestsForFork(int fork, int forks, Integer seed) {
        BigInteger gitSha = new BigInteger(getProject().hasProperty("corda_revision") ?
                getProject().property("corda_revision").toString() : "0", 36);
        if (fork >= forks) {
            throw new IllegalArgumentException("requested shard " + (fork + 1) + " for total shards " + forks);
        }
        int seedToUse = seed != null ? (seed + (this.getPath()).hashCode() + gitSha.intValue()) : 0;
        return new ListShufflerAndAllocator(allTests).getTestsForFork(fork, forks, seedToUse);
    }

    @Override
    public List<String> getAllTestsDiscovered() {
        return new ArrayList<>(allTests);
    }

    @TaskAction
    void discoverTests() {
        Collection<String> results;
        switch (distribution) {
            case METHOD:
                results = getClassGraphStreamOfTestClasses()
                        .map(classInfo -> classInfo.getMethodInfo().filter(methodInfo -> methodInfo.hasAnnotation("org.junit.Test"))
                                .stream().map(methodInfo -> classInfo.getName() + "." + methodInfo.getName()))
                        .flatMap(Function.identity())
                        .collect(Collectors.toSet());

                this.allTests = results.stream().sorted().collect(Collectors.toList());
                break;
            case CLASS:
                results = getClassGraphStreamOfTestClasses()
                        .map(ClassInfo::getName)
                        .collect(Collectors.toSet());
                this.allTests = results.stream().sorted().collect(Collectors.toList());
                break;
        }
    }

    @NotNull
    private Stream<ClassInfo> getClassGraphStreamOfTestClasses() {
        return new ClassGraph()
                .enableClassInfo()
                .enableMethodInfo()
                .ignoreClassVisibility()
                .ignoreMethodVisibility()
                .enableAnnotationInfo()
                .overrideClasspath(scanClassPath)
                .scan()
                .getClassesWithMethodAnnotation("org.junit.Test")
                .stream()
                .map(classInfo -> {
                    ClassInfoList returnList = new ClassInfoList();
                    returnList.add(classInfo);
                    returnList.addAll(classInfo.getSubclasses());
                    return returnList;
                })
                .flatMap(ClassInfoList::stream);
    }
}
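The distribution mode is chosen from the distributeBy system property when the task is constructed, falling back to METHOD. A sketch of that selection, mirroring the field initialiser above (the property value is illustrative):

class DistributionSketch {
    public static void main(String[] args) {
        System.setProperty(ListTests.DISTRIBUTION_PROPERTY, "CLASS");
        String value = System.getProperty(ListTests.DISTRIBUTION_PROPERTY);
        DistributeTestsBy mode = (value != null && !value.isEmpty())
                ? DistributeTestsBy.valueOf(value)   // e.g. -DdistributeBy=CLASS
                : DistributeTestsBy.METHOD;          // default when the property is unset
        System.out.println(mode); // CLASS
    }
}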
ParallelTestGroup.java
@@ -1,115 +0,0 @@
package net.corda.testing;

import org.gradle.api.DefaultTask;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ParallelTestGroup extends DefaultTask {

    private DistributeTestsBy distribution = DistributeTestsBy.METHOD;
    private List<String> groups = new ArrayList<>();
    private int shardCount = 20;
    private int coresToUse = 4;
    private int gbOfMemory = 4;
    private boolean printToStdOut = true;
    private PodLogLevel logLevel = PodLogLevel.INFO;
    private String sidecarImage;
    private List<String> additionalArgs = new ArrayList<>();
    private List<String> taints = new ArrayList<>();

    public DistributeTestsBy getDistribution() {
        return distribution;
    }

    public List<String> getGroups() {
        return groups;
    }

    public int getShardCount() {
        return shardCount;
    }

    public int getCoresToUse() {
        return coresToUse;
    }

    public int getGbOfMemory() {
        return gbOfMemory;
    }

    public boolean getPrintToStdOut() {
        return printToStdOut;
    }

    public PodLogLevel getLogLevel() {
        return logLevel;
    }

    public String getSidecarImage() {
        return sidecarImage;
    }

    public List<String> getAdditionalArgs() {
        return additionalArgs;
    }

    public List<String> getNodeTaints() {
        return new ArrayList<>(taints);
    }

    public void numberOfShards(int shards) {
        this.shardCount = shards;
    }

    public void podLogLevel(PodLogLevel level) {
        this.logLevel = level;
    }

    public void distribute(DistributeTestsBy dist) {
        this.distribution = dist;
    }

    public void coresPerFork(int cores) {
        this.coresToUse = cores;
    }

    public void memoryInGbPerFork(int gb) {
        this.gbOfMemory = gb;
    }

    // when this is false, only containers with "failed" exit codes will be printed to stdout
    public void streamOutput(boolean print) {
        this.printToStdOut = print;
    }

    public void testGroups(String... group) {
        testGroups(Arrays.asList(group));
    }

    private void testGroups(List<String> group) {
        groups.addAll(group);
    }

    public void sidecarImage(String sidecarImage) {
        this.sidecarImage = sidecarImage;
    }

    public void additionalArgs(String... additionalArgs) {
        additionalArgs(Arrays.asList(additionalArgs));
    }

    private void additionalArgs(List<String> additionalArgs) {
        this.additionalArgs.addAll(additionalArgs);
    }

    public void nodeTaints(String... taints) {
        nodeTaints(Arrays.asList(taints));
    }

    private void nodeTaints(List<String> taints) {
        this.taints.addAll(taints);
    }

}
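For reference, a hypothetical programmatic registration of a group using the configuration methods above; the task name and sizing values are illustrative, and a Gradle Project is assumed to be in scope:

import org.gradle.api.Project;

class GroupRegistrationSketch {
    static ParallelTestGroup register(Project project) {
        ParallelTestGroup group = project.getTasks().create("allParallelUnitTest", ParallelTestGroup.class);
        group.testGroups("test");              // the gradle test task(s) this group fans out
        group.numberOfShards(10);              // pods to split the tests across
        group.coresPerFork(4);
        group.memoryInGbPerFork(8);
        group.distribute(DistributeTestsBy.METHOD);
        group.podLogLevel(PodLogLevel.INFO);
        group.streamOutput(false);             // only failed containers echo to stdout
        return group;
    }
}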
PodAllocator.java
@@ -1,147 +0,0 @@
package net.corda.testing;

import io.fabric8.kubernetes.api.model.Quantity;
import io.fabric8.kubernetes.api.model.TolerationBuilder;
import io.fabric8.kubernetes.api.model.batch.Job;
import io.fabric8.kubernetes.api.model.batch.JobBuilder;
import io.fabric8.kubernetes.client.Config;
import io.fabric8.kubernetes.client.ConfigBuilder;
import io.fabric8.kubernetes.client.DefaultKubernetesClient;
import io.fabric8.kubernetes.client.KubernetesClient;
import io.fabric8.kubernetes.client.KubernetesClientException;
import io.fabric8.kubernetes.client.Watch;
import io.fabric8.kubernetes.client.Watcher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Comparator;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;

public class PodAllocator {

    private static final int CONNECTION_TIMEOUT = 60_000; // 60 seconds
    private final Logger logger;

    public PodAllocator(Logger logger) {
        this.logger = logger;
    }

    public PodAllocator() {
        this.logger = LoggerFactory.getLogger(PodAllocator.class);
    }

    public void allocatePods(Integer number,
                             Integer coresPerPod,
                             Integer memoryPerPod,
                             String prefix,
                             List<String> taints) {

        Config config = getConfig();
        KubernetesClient client = new DefaultKubernetesClient(config);

        List<Job> podsToRequest = IntStream.range(0, number).mapToObj(i -> buildJob("pa-" + prefix + i, coresPerPod, memoryPerPod, taints)).collect(Collectors.toList());
        List<Job> createdJobs = podsToRequest.stream().map(requestedJob -> {
            String msg = "PreAllocating " + requestedJob.getMetadata().getName();
            if (logger instanceof org.gradle.api.logging.Logger) {
                ((org.gradle.api.logging.Logger) logger).quiet(msg);
            } else {
                logger.info(msg);
            }
            return client.batch().jobs().inNamespace(KubesTest.NAMESPACE).create(requestedJob);
        }).collect(Collectors.toList());

        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            KubernetesClient tearDownClient = new DefaultKubernetesClient(getConfig());
            tearDownClient.batch().jobs().delete(createdJobs);
        }));
    }

    private Config getConfig() {
        return new ConfigBuilder()
                .withConnectionTimeout(CONNECTION_TIMEOUT)
                .withRequestTimeout(CONNECTION_TIMEOUT)
                .withRollingTimeout(CONNECTION_TIMEOUT)
                .withWebsocketTimeout(CONNECTION_TIMEOUT)
                .withWebsocketPingInterval(CONNECTION_TIMEOUT)
                .build();
    }

    public void tearDownPods(String prefix) {
        io.fabric8.kubernetes.client.Config config = getConfig();
        KubernetesClient client = new DefaultKubernetesClient(config);
        Stream<Job> jobsToDelete = client.batch().jobs().inNamespace(KubesTest.NAMESPACE).list()
                .getItems()
                .stream()
                .sorted(Comparator.comparing(p -> p.getMetadata().getName()))
                .filter(foundPod -> foundPod.getMetadata().getName().contains(prefix));

        List<CompletableFuture<Job>> deleteFutures = jobsToDelete.map(job -> {
            CompletableFuture<Job> result = new CompletableFuture<>();
            Watch watch = client.batch().jobs().inNamespace(job.getMetadata().getNamespace()).withName(job.getMetadata().getName()).watch(new Watcher<Job>() {
                @Override
                public void eventReceived(Action action, Job resource) {
                    if (action == Action.DELETED) {
                        result.complete(resource);
                        String msg = "Successfully deleted job " + job.getMetadata().getName();
                        logger.info(msg);
                    }
                }

                @Override
                public void onClose(KubernetesClientException cause) {
                    String message = "Failed to delete job " + job.getMetadata().getName();
                    if (logger instanceof org.gradle.api.logging.Logger) {
                        ((org.gradle.api.logging.Logger) logger).error(message);
                    } else {
                        logger.info(message);
                    }
                    result.completeExceptionally(cause);
                }
            });
            client.batch().jobs().delete(job);
            return result;
        }).collect(Collectors.toList());

        try {
            CompletableFuture.allOf(deleteFutures.toArray(new CompletableFuture[0])).get(5, TimeUnit.MINUTES);
        } catch (InterruptedException | ExecutionException | TimeoutException e) {
            // ignore - there's nothing left to do
        }
    }

    Job buildJob(String podName, Integer coresPerPod, Integer memoryPerPod, List<String> taints) {
        return new JobBuilder().withNewMetadata().withName(podName).endMetadata()
                .withNewSpec()
                .withTtlSecondsAfterFinished(10)
                .withNewTemplate()
                .withNewMetadata()
                .withName(podName + "-pod")
                .endMetadata()
                .withNewSpec()
                .withTolerations(taints.stream().map(taint -> new TolerationBuilder().withKey("key").withValue(taint).withOperator("Equal").withEffect("NoSchedule").build()).collect(Collectors.toList()))
                .addNewContainer()
                .withImage("busybox:latest")
                .withCommand("sh")
                .withArgs("-c", "sleep 300")
                .withName(podName)
                .withNewResources()
                .addToRequests("cpu", new Quantity(coresPerPod.toString()))
                .addToRequests("memory", new Quantity(memoryPerPod.toString() + "Gi"))
                .endResources()
                .endContainer()
                .withRestartPolicy("Never")
                .endSpec()
                .endTemplate()
                .endSpec()
                .build();
    }

}
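The allocator's intended flow, as a hedged sketch: cheap busybox Jobs are created first (named with the "pa-" prefix) so cluster capacity is warmed up before the real test pods arrive, and the placeholders are then torn down by prefix. The values below are hypothetical:

import java.util.Collections;

class AllocationSketch {
    public static void main(String[] args) {
        PodAllocator allocator = new PodAllocator();
        // 10 placeholder jobs, each requesting 4 cores and 8Gi, named "pa-mybuild0".."pa-mybuild9"
        allocator.allocatePods(10, 4, 8, "mybuild", Collections.emptyList());
        // ... later, once the real test pods are ready to schedule:
        allocator.tearDownPods("mybuild");
    }
}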
PodLogLevel.java
@@ -1,5 +0,0 @@
package net.corda.testing;

public enum PodLogLevel {
    QUIET, WARN, INFO, DEBUG
}
Properties.java
@@ -1,91 +0,0 @@
package net.corda.testing;

import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A single class to hold some of the properties we need to get from the command line
 * in order to store test results in Artifactory.
 */
public class Properties {
    private static final Logger LOG = LoggerFactory.getLogger(Properties.class);

    private static String ROOT_PROJECT_TYPE = "corda"; // corda or enterprise

    /**
     * Get the Corda type. Used in the tag names when we store in Artifactory.
     *
     * @return either 'corda' or 'enterprise'
     */
    static String getRootProjectType() {
        return ROOT_PROJECT_TYPE;
    }

    /**
     * Set the Corda (repo) type - either enterprise, or corda (open-source).
     * Used in the tag names when we store in Artifactory.
     *
     * @param rootProjectType the corda repo type.
     */
    static void setRootProjectType(@NotNull final String rootProjectType) {
        ROOT_PROJECT_TYPE = rootProjectType;
    }

    /**
     * Get property with logging
     *
     * @param key property to get
     * @return empty string, or trimmed value
     */
    @NotNull
    static String getProperty(@NotNull final String key) {
        final String value = System.getProperty(key, "").trim();
        if (value.isEmpty()) {
            LOG.debug("Property '{}' not set", key);
        } else {
            LOG.debug("Ok. Property '{}' is set", key);
        }
        return value;
    }

    /**
     * Get Artifactory username
     *
     * @return the username
     */
    static String getUsername() {
        return getProperty("artifactory.username");
    }

    /**
     * Get Artifactory password
     *
     * @return the password
     */
    static String getPassword() {
        return getProperty("artifactory.password");
    }

    /**
     * The current branch/tag
     *
     * @return the current branch
     */
    @NotNull
    static String getGitBranch() {
        return getProperty("git.branch").replace('/', '-');
    }

    /**
     * @return the branch that this branch was likely checked out from.
     */
    @NotNull
    static String getTargetGitBranch() {
        return getProperty("git.target.branch").replace('/', '-');
    }

    static boolean getPublishJunitTests() {
        return !getProperty("publish.junit").isEmpty();
    }
}
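A small sketch of the sanitisation these getters apply: slashes in git ref names are replaced with dashes, presumably so the value is safe to use in Artifactory tags and file names (the branch name is illustrative):

class PropertiesSketch {
    public static void main(String[] args) {
        System.setProperty("git.branch", "release/os/4.3");
        System.out.println(Properties.getGitBranch()); // release-os-4.3
    }
}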
TestDurationArtifacts.java
@@ -1,430 +0,0 @@
package net.corda.testing;
|
|
||||||
|
|
||||||
import groovy.lang.Tuple2;
|
|
||||||
import org.apache.commons.compress.archivers.ArchiveEntry;
|
|
||||||
import org.apache.commons.compress.archivers.ArchiveException;
|
|
||||||
import org.apache.commons.compress.archivers.ArchiveInputStream;
|
|
||||||
import org.apache.commons.compress.archivers.ArchiveStreamFactory;
|
|
||||||
import org.apache.commons.compress.utils.IOUtils;
|
|
||||||
import org.gradle.api.Project;
|
|
||||||
import org.gradle.api.Task;
|
|
||||||
import org.gradle.api.tasks.bundling.Zip;
|
|
||||||
import org.jetbrains.annotations.NotNull;
|
|
||||||
import org.jetbrains.annotations.Nullable;
|
|
||||||
import org.slf4j.Logger;
|
|
||||||
import org.slf4j.LoggerFactory;
|
|
||||||
import org.w3c.dom.Document;
|
|
||||||
import org.w3c.dom.NamedNodeMap;
|
|
||||||
import org.w3c.dom.Node;
|
|
||||||
import org.w3c.dom.NodeList;
|
|
||||||
import org.xml.sax.SAXException;
|
|
||||||
|
|
||||||
import javax.xml.parsers.DocumentBuilder;
|
|
||||||
import javax.xml.parsers.DocumentBuilderFactory;
|
|
||||||
import javax.xml.parsers.ParserConfigurationException;
|
|
||||||
import javax.xml.xpath.XPath;
|
|
||||||
import javax.xml.xpath.XPathConstants;
|
|
||||||
import javax.xml.xpath.XPathExpression;
|
|
||||||
import javax.xml.xpath.XPathExpressionException;
|
|
||||||
import javax.xml.xpath.XPathFactory;
|
|
||||||
import java.io.BufferedInputStream;
|
|
||||||
import java.io.ByteArrayInputStream;
|
|
||||||
import java.io.ByteArrayOutputStream;
|
|
||||||
import java.io.File;
|
|
||||||
import java.io.FileInputStream;
|
|
||||||
import java.io.FileNotFoundException;
|
|
||||||
import java.io.FileWriter;
|
|
||||||
import java.io.IOException;
|
|
||||||
import java.io.InputStream;
|
|
||||||
import java.io.InputStreamReader;
|
|
||||||
import java.io.OutputStream;
|
|
||||||
import java.nio.file.FileSystems;
|
|
||||||
import java.nio.file.FileVisitResult;
|
|
||||||
import java.nio.file.FileVisitor;
|
|
||||||
import java.nio.file.Files;
|
|
||||||
import java.nio.file.Path;
|
|
||||||
import java.nio.file.PathMatcher;
|
|
||||||
import java.nio.file.attribute.BasicFileAttributes;
|
|
||||||
import java.util.ArrayList;
|
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.List;
|
|
||||||
import java.util.function.BiFunction;
|
|
||||||
import java.util.function.Supplier;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get or put test artifacts to/from a REST endpoint. The expected format is a zip file of junit XML files.
|
|
||||||
* See https://www.jfrog.com/confluence/display/RTF/Artifactory+REST+API
|
|
||||||
*/
|
|
||||||
public class TestDurationArtifacts {
|
|
||||||
private static final String EXTENSION = "zip";
|
|
||||||
private static final String BASE_URL = "https://software.r3.com/artifactory/corda-test-results/net/corda";
|
|
||||||
private static final Logger LOG = LoggerFactory.getLogger(TestDurationArtifacts.class);
|
|
||||||
private static final String ARTIFACT = "tests-durations";
|
|
||||||
// The one and only set of tests information. We load these at the start of a build, and update them and save them at the end.
|
|
||||||
static Tests tests = new Tests();
|
|
||||||
|
|
||||||
// Artifactory API
|
|
||||||
private final Artifactory artifactory = new Artifactory();
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Write out the test durations as a CSV file.
|
|
||||||
* Reload the tests from artifactory and update with the latest run.
|
|
||||||
*
|
|
||||||
* @param project project that we are attaching the test to.
|
|
||||||
* @param name basename for the test.
|
|
||||||
* @return the csv task
|
|
||||||
*/
|
|
||||||
private static Task createCsvTask(@NotNull final Project project, @NotNull final String name) {
|
|
||||||
return project.getTasks().create("createCsvFromXmlFor" + capitalize(name), Task.class, t -> {
|
|
||||||
t.setGroup(DistributedTesting.GRADLE_GROUP);
|
|
||||||
t.setDescription("Create csv from all discovered junit xml files");
|
|
||||||
|
|
||||||
// Parse all the junit results and write them to a csv file.
|
|
||||||
t.doFirst(task -> {
|
|
||||||
project.getLogger().warn("About to create CSV file and zip it");
|
|
||||||
|
|
||||||
// Reload the test object from artifactory
|
|
||||||
loadTests();
|
|
||||||
// Get the list of junit xml artifacts
|
|
||||||
final List<Path> testXmlFiles = getTestXmlFiles(project.getBuildDir().getAbsoluteFile().toPath());
|
|
||||||
project.getLogger().warn("Found {} xml junit files", testXmlFiles.size());
|
|
||||||
|
|
||||||
// Read test xml files for tests and duration and add them to the `Tests` object
|
|
||||||
// This adjusts the runCount and over all average duration for existing tests.
|
|
||||||
for (Path testResult : testXmlFiles) {
|
|
||||||
try {
|
|
||||||
final List<Tuple2<String, Long>> unitTests = fromJunitXml(new FileInputStream(testResult.toFile()));
|
|
||||||
|
|
||||||
// Add the non-zero duration tests to build up an average.
|
|
||||||
unitTests.stream()
|
|
||||||
.filter(t2 -> t2.getSecond() > 0L)
|
|
||||||
.forEach(unitTest -> tests.addDuration(unitTest.getFirst(), unitTest.getSecond()));
|
|
||||||
|
|
||||||
final long meanDurationForTests = tests.getMeanDurationForTests();
|
|
||||||
|
|
||||||
// Add the zero duration tests using the mean value so they are fairly distributed over the pods in the next run.
|
|
||||||
// If we used 'zero' they would all be added to the smallest bucket.
|
|
||||||
unitTests.stream()
|
|
||||||
.filter(t2 -> t2.getSecond() <= 0L)
|
|
||||||
.forEach(unitTest -> tests.addDuration(unitTest.getFirst(), meanDurationForTests));
|
|
||||||
|
|
||||||
} catch (FileNotFoundException ignored) {
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Write the test file to disk.
|
|
||||||
try {
|
|
||||||
final FileWriter writer = new FileWriter(new File(project.getRootDir(), ARTIFACT + ".csv"));
|
|
||||||
tests.write(writer);
|
|
||||||
LOG.warn("Written tests csv file with {} tests", tests.size());
|
|
||||||
} catch (IOException ignored) {
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
@NotNull
|
|
||||||
static String capitalize(@NotNull final String str) {
|
|
||||||
return str.substring(0, 1).toUpperCase() + str.substring(1); // groovy has this as an extension method
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Discover junit xml files, zip them, and upload to artifactory.
|
|
||||||
*
|
|
||||||
* @param project root project
|
|
||||||
* @param name task name that we're 'extending'
|
|
||||||
* @return gradle task
|
|
||||||
*/
|
|
||||||
@NotNull
|
|
||||||
private static Task createJunitZipTask(@NotNull final Project project, @NotNull final String name) {
|
|
||||||
return project.getTasks().create("zipJunitXmlFilesAndUploadFor" + capitalize(name), Zip.class, z -> {
|
|
||||||
z.setGroup(DistributedTesting.GRADLE_GROUP);
|
|
||||||
z.setDescription("Zip junit files and upload to artifactory");
|
|
||||||
|
|
||||||
z.getArchiveFileName().set(Artifactory.getFileName("junit", EXTENSION, getBranchTag()));
|
|
||||||
z.getDestinationDirectory().set(project.getRootDir());
|
|
||||||
z.setIncludeEmptyDirs(false);
|
|
||||||
z.from(project.getRootDir(), task -> task.include("**/build/test-results-xml/**/*.xml", "**/build/test-results/**/*.xml"));
|
|
||||||
z.doLast(task -> {
|
|
||||||
try (FileInputStream inputStream = new FileInputStream(new File(z.getArchiveFileName().get()))) {
|
|
||||||
new Artifactory().put(BASE_URL, getBranchTag(), "junit", EXTENSION, inputStream);
|
|
||||||
} catch (Exception ignored) {
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Zip and upload test-duration csv files to artifactory
|
|
||||||
*
|
|
||||||
* @param project root project that we're attaching the task to
|
|
||||||
* @param name the task name we're 'extending'
|
|
||||||
* @return gradle task
|
|
||||||
*/
|
|
||||||
@NotNull
|
|
||||||
private static Task createCsvZipAndUploadTask(@NotNull final Project project, @NotNull final String name) {
|
|
||||||
return project.getTasks().create("zipCsvFilesAndUploadFor" + capitalize(name), Zip.class, z -> {
|
|
||||||
z.setGroup(DistributedTesting.GRADLE_GROUP);
|
|
||||||
z.setDescription("Zips test duration csv and uploads to artifactory");
|
|
||||||
|
|
||||||
z.getArchiveFileName().set(Artifactory.getFileName(ARTIFACT, EXTENSION, getBranchTag()));
|
|
||||||
z.getDestinationDirectory().set(project.getRootDir());
|
|
||||||
z.setIncludeEmptyDirs(false);
|
|
||||||
|
|
||||||
// There's only one csv, but glob it anyway.
|
|
||||||
z.from(project.getRootDir(), task -> task.include("**/" + ARTIFACT + ".csv"));
|
|
||||||
|
|
||||||
// ...base class method zips the CSV...
|
|
||||||
|
|
||||||
z.doLast(task -> {
|
|
||||||
// We've now created the one csv file containing the tests and their mean durations,
|
|
||||||
// this task has zipped it, so we now just upload it.
|
|
||||||
project.getLogger().warn("SAVING tests");
|
|
||||||
project.getLogger().warn("Attempting to upload {}", z.getArchiveFileName().get());
|
|
||||||
try (FileInputStream inputStream = new FileInputStream(new File(z.getArchiveFileName().get()))) {
|
|
||||||
if (!new TestDurationArtifacts().put(getBranchTag(), inputStream)) {
|
|
||||||
project.getLogger().warn("Could not upload zip of tests");
|
|
||||||
} else {
|
|
||||||
project.getLogger().warn("SAVED tests");
|
|
||||||
}
|
|
||||||
} catch (Exception e) {
|
|
||||||
project.getLogger().warn("Problem trying to upload: {} {}", z.getArchiveFileName().get(), e.toString());
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Create the Gradle Zip task to gather test information
|
|
||||||
*
|
|
||||||
* @param project project to attach this task to
|
|
||||||
* @param name name of the task
|
|
||||||
* @param task a task that we depend on when creating the csv so Gradle produces the correct task graph.
|
|
||||||
* @return a reference to the created task.
|
|
||||||
*/
|
|
||||||
@NotNull
|
|
||||||
public static Task createZipTask(@NotNull final Project project, @NotNull final String name, @Nullable final Task task) {
|
|
||||||
final Task csvTask = createCsvTask(project, name);
|
|
||||||
|
|
||||||
if (Properties.getPublishJunitTests()) {
|
|
||||||
final Task zipJunitTask = createJunitZipTask(project, name);
|
|
||||||
csvTask.dependsOn(zipJunitTask);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (task != null) {
|
|
||||||
csvTask.dependsOn(task);
|
|
||||||
}
|
|
||||||
final Task zipCsvTask = createCsvZipAndUploadTask(project, name);
|
|
||||||
zipCsvTask.dependsOn(csvTask); // we have to create the csv before we can zip it.
|
|
||||||
|
|
||||||
return zipCsvTask;
|
|
||||||
}
|
|
||||||
|
|
||||||
static List<Path> getTestXmlFiles(@NotNull final Path rootDir) {
|
|
||||||
List<Path> paths = new ArrayList<>();
|
|
||||||
List<PathMatcher> matchers = new ArrayList<>();
|
|
||||||
matchers.add(FileSystems.getDefault().getPathMatcher("glob:**/test-results-xml/**/*.xml"));
|
|
||||||
try {
|
|
||||||
Files.walkFileTree(rootDir, new FileVisitor<Path>() {
|
|
||||||
@Override
|
|
||||||
public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) {
|
|
||||||
return FileVisitResult.CONTINUE;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) {
|
|
||||||
for (PathMatcher matcher : matchers) {
|
|
||||||
if (matcher.matches(file)) {
|
|
||||||
paths.add(file);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return FileVisitResult.CONTINUE;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public FileVisitResult visitFileFailed(Path file, IOException exc) {
|
|
||||||
return FileVisitResult.CONTINUE;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public FileVisitResult postVisitDirectory(Path dir, IOException exc) {
|
|
||||||
return FileVisitResult.CONTINUE;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
} catch (IOException e) {
|
|
||||||
LOG.warn("Could not walk tree and get all test xml files: {}", e.toString());
|
|
||||||
}
|
|
||||||
return paths;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Unzip test results in memory and return test names and durations.
|
|
||||||
* Assumes the input stream contains only csv files of the correct format.
|
|
||||||
*
|
|
||||||
* @param tests reference to the Tests object to be populated.
|
|
||||||
* @param zippedInputStream stream containing zipped result file(s)
|
|
||||||
*/
|
|
||||||
static void addTestsFromZippedCsv(@NotNull final Tests tests,
|
|
||||||
@NotNull final InputStream zippedInputStream) {
|
|
||||||
// We need this because ArchiveStream requires the `mark` functionality which is supported in buffered streams.
|
|
||||||
final BufferedInputStream bufferedInputStream = new BufferedInputStream(zippedInputStream);
|
|
||||||
try (ArchiveInputStream archiveInputStream = new ArchiveStreamFactory().createArchiveInputStream(bufferedInputStream)) {
|
|
||||||
ArchiveEntry e;
|
|
||||||
while ((e = archiveInputStream.getNextEntry()) != null) {
|
|
||||||
if (e.isDirectory()) continue;
|
|
||||||
|
|
||||||
// We seem to need to take a copy of the original input stream (as positioned by the ArchiveEntry), because
|
|
||||||
// the XML parsing closes the stream after it has finished. This has the side effect of only parsing the first
|
|
||||||
// entry in the archive.
|
|
||||||
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
|
|
||||||
IOUtils.copy(archiveInputStream, outputStream);
|
|
||||||
ByteArrayInputStream byteInputStream = new ByteArrayInputStream(outputStream.toByteArray());
|
|
||||||
|
|
||||||
// Read the tests from the (csv) stream
|
|
||||||
final InputStreamReader reader = new InputStreamReader(byteInputStream);
|
|
||||||
|
|
||||||
// Add the tests to the Tests object
|
|
||||||
tests.addTests(Tests.read(reader));
|
|
||||||
}
|
|
||||||
} catch (ArchiveException | IOException e) {
|
|
||||||
LOG.warn("Problem unzipping XML test results");
|
|
||||||
}
|
|
||||||
|
|
||||||
LOG.debug("Discovered {} tests", tests.size());
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* For a given stream, return the testcase names and durations.
|
|
||||||
* <p>
|
|
||||||
* NOTE: the input stream will be closed by this method.
|
|
||||||
*
|
|
||||||
* @param inputStream an InputStream, closed once parsed
|
|
||||||
* @return a list of test names and their durations in nanos.
|
|
||||||
*/
|
|
||||||
@NotNull
|
|
||||||
static List<Tuple2<String, Long>> fromJunitXml(@NotNull final InputStream inputStream) {
|
|
||||||
final DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
|
|
||||||
final List<Tuple2<String, Long>> results = new ArrayList<>();
|
|
||||||
|
|
||||||
try {
|
|
||||||
final DocumentBuilder builder = dbFactory.newDocumentBuilder();
|
|
||||||
final Document document = builder.parse(inputStream);
|
|
||||||
document.getDocumentElement().normalize();
|
|
||||||
final XPathFactory xPathfactory = XPathFactory.newInstance();
|
|
||||||
final XPath xpath = xPathfactory.newXPath();
|
|
||||||
final XPathExpression expression = xpath.compile("//testcase");
|
|
||||||
final NodeList nodeList = (NodeList) expression.evaluate(document, XPathConstants.NODESET);
|
|
||||||
|
|
||||||
final BiFunction<NamedNodeMap, String, String> get =
|
|
||||||
(a, k) -> a.getNamedItem(k) != null ? a.getNamedItem(k).getNodeValue() : "";
|
|
||||||
|
|
||||||
for (int i = 0; i < nodeList.getLength(); i++) {
|
|
||||||
final Node item = nodeList.item(i);
|
|
||||||
final NamedNodeMap attributes = item.getAttributes();
|
|
||||||
final String testName = get.apply(attributes, "name");
|
|
||||||
final String testDuration = get.apply(attributes, "time");
|
|
||||||
final String testClassName = get.apply(attributes, "classname");
|
|
||||||
|
|
||||||
// If the test doesn't have a duration (it should), we return zero.
|
|
||||||
if (!(testName.isEmpty() || testClassName.isEmpty())) {
|
|
||||||
final long nanos = !testDuration.isEmpty() ? (long) (Double.parseDouble(testDuration) * 1_000_000_000.0) : 0L;
|
|
||||||
results.add(new Tuple2<>(testClassName + "." + testName, nanos));
|
|
||||||
} else {
|
|
||||||
LOG.warn("Bad test in junit xml: name={} className={}", testName, testClassName);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch (ParserConfigurationException | IOException | XPathExpressionException | SAXException e) {
|
|
||||||
return Collections.emptyList();
|
|
||||||
}
|
|
||||||
|
|
||||||
return results;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* A supplier of tests.
|
|
||||||
* <p>
|
|
||||||
* We get them from Artifactory and then parse the test xml files to get the duration.
|
|
||||||
*
|
|
||||||
* @return a supplier of test results
|
|
||||||
*/
|
|
||||||
@NotNull
|
|
||||||
static Supplier<Tests> getTestsSupplier() {
|
|
||||||
return TestDurationArtifacts::loadTests;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* we need to prepend the project type so that we have a unique tag for artifactory
|
|
||||||
*
|
|
||||||
* @return
|
|
||||||
*/
|
|
||||||
static String getBranchTag() {
|
|
||||||
return (Properties.getRootProjectType() + "-" + Properties.getGitBranch()).replace('.', '-');
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* we need to prepend the project type so that we have a unique tag artifactory
|
|
||||||
*
|
|
||||||
* @return
|
|
||||||
*/
|
|
||||||
static String getTargetBranchTag() {
|
|
||||||
return (Properties.getRootProjectType() + "-" + Properties.getTargetGitBranch()).replace('.', '-');
|
|
||||||
}
|
|
||||||
|
|
||||||

    /**
     * Load the tests from Artifactory, in-memory. No temp file used. Existing test data is cleared.
     *
     * @return a reference to the loaded tests.
     */
    static Tests loadTests() {
        LOG.warn("LOADING previous test runs from Artifactory");
        tests.clear();
        try {
            final TestDurationArtifacts testArtifacts = new TestDurationArtifacts();
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();

            // Try getting artifacts for our branch, if not, try the target branch.
            if (!testArtifacts.get(getBranchTag(), outputStream)) {
                outputStream = new ByteArrayOutputStream();
                LOG.warn("Could not get tests from Artifactory for tag {}, trying {}", getBranchTag(), getTargetBranchTag());
                if (!testArtifacts.get(getTargetBranchTag(), outputStream)) {
                    LOG.warn("Could not get any tests from Artifactory");
                    return tests;
                }
            }

            ByteArrayInputStream inputStream = new ByteArrayInputStream(outputStream.toByteArray());
            addTestsFromZippedCsv(tests, inputStream);
            LOG.warn("Got {} tests from Artifactory", tests.size());
            return tests;
        } catch (Exception e) { // was IOException
            LOG.warn(e.toString());
            LOG.warn("Could not get tests from Artifactory");
            return tests;
        }
    }

    /**
     * Get tests for the specified tag into the outputStream.
     *
     * @param theTag       tag for tests
     * @param outputStream stream of zipped xml files
     * @return false if we fail to get the tests
     */
    private boolean get(@NotNull final String theTag, @NotNull final OutputStream outputStream) {
        return artifactory.get(BASE_URL, theTag, ARTIFACT, "zip", outputStream);
    }

    /**
     * Upload the supplied tests
     *
     * @param theTag      tag for tests
     * @param inputStream stream of zipped xml files.
     * @return true if we succeed
     */
    private boolean put(@NotNull final String theTag, @NotNull final InputStream inputStream) {
        return artifactory.put(BASE_URL, theTag, ARTIFACT, EXTENSION, inputStream);
    }
}
@@ -1,213 +0,0 @@
package net.corda.testing;

import groovy.lang.Tuple2;
import groovy.lang.Tuple3;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
import org.apache.commons.csv.CSVRecord;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.io.Reader;
import java.io.Writer;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;

public class Tests {
    final static String TEST_NAME = "Test Name";
    final static String MEAN_DURATION_NANOS = "Mean Duration Nanos";
    final static String NUMBER_OF_RUNS = "Number of runs";
    private static final Logger LOG = LoggerFactory.getLogger(Tests.class);
    // test name -> (mean duration, number of runs)
    private final Map<String, Tuple2<Long, Long>> tests = new HashMap<>();
    // If we don't have any tests from which to get a mean, use this.
    static long DEFAULT_MEAN_NANOS = 1000L;

    private static Tuple2<Long, Long> DEFAULT_MEAN_TUPLE = new Tuple2<>(DEFAULT_MEAN_NANOS, 0L);
    // mean, count
    private Tuple2<Long, Long> meanForTests = DEFAULT_MEAN_TUPLE;

    /**
     * Read tests, mean duration and runs from a csv file.
     *
     * @param reader a reader
     * @return list of tests, or an empty list if none or we have a problem.
     */
    public static List<Tuple3<String, Long, Long>> read(Reader reader) {
        try {
            List<CSVRecord> records = CSVFormat.DEFAULT.withHeader().parse(reader).getRecords();
            return records.stream().map(record -> {
                try {
                    final String testName = record.get(TEST_NAME);
                    final long testDuration = Long.parseLong(record.get(MEAN_DURATION_NANOS));
                    final long testRuns = Long.parseLong(record.get(NUMBER_OF_RUNS)); // can't see how we would have zero tbh.
                    return new Tuple3<>(testName, testDuration, Math.max(testRuns, 1));
                } catch (IllegalArgumentException | IllegalStateException e) {
                    return null;
                }
            }).filter(Objects::nonNull).sorted(Comparator.comparing(Tuple3::getFirst)).collect(Collectors.toList());
        } catch (IOException ignored) {

        }
        return Collections.emptyList();
    }
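
    // Minimal usage sketch (added; mirrors TestsTest in this commit):
    //   Tests tests = new Tests();
    //   tests.addTests(Tests.read(new StringReader(
    //           "Test Name,Mean Duration Nanos,Number of runs\nhello,100,4\n")));
    //   tests.getDuration("hello");  // 100 (nanos)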

    private static Tuple2<Long, Long> recalculateMean(@NotNull final Tuple2<Long, Long> previous, long nanos) {
        final long total = previous.getFirst() * previous.getSecond() + nanos;
        final long count = previous.getSecond() + 1;
        return new Tuple2<>(total / count, count);
    }
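
    // Worked arithmetic (added for clarity): for a previous mean of 100 ns over 4 runs,
    // recalculateMean(new Tuple2<>(100L, 4L), 600) computes total = 100 * 4 + 600 = 1000
    // and count = 5, giving a new tuple of (200, 5) - this matches
    // TestsTest.addingTestChangesMeanDuration.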

    /**
     * Write a csv file of test name, duration, runs
     *
     * @param writer a writer
     * @return true if no problems.
     */
    public boolean write(@NotNull final Writer writer) {
        boolean ok = true;
        final CSVPrinter printer;
        try {
            printer = new CSVPrinter(writer,
                    CSVFormat.DEFAULT.withHeader(TEST_NAME, MEAN_DURATION_NANOS, NUMBER_OF_RUNS));
            for (String key : tests.keySet()) {
                printer.printRecord(key, tests.get(key).getFirst(), tests.get(key).getSecond());
            }

            printer.flush();
        } catch (IOException e) {
            ok = false;
        }
        return ok;
    }

    /**
     * Add tests, and also (re)calculate the mean test duration.
     * e.g. addTests(read(reader));
     *
     * @param testsCollection tests, typically from a csv file.
     */
    public void addTests(@NotNull final List<Tuple3<String, Long, Long>> testsCollection) {
        testsCollection.forEach(t -> this.tests.put(t.getFirst(), new Tuple2<>(t.getSecond(), t.getThird())));

        // Calculate the mean test time.
        if (tests.size() > 0) {
            long total = 0;
            for (String testName : this.tests.keySet()) total += tests.get(testName).getFirst();
            meanForTests = new Tuple2<>(total / this.tests.size(), 1L);
        }
    }

    /**
     * Get the known mean duration of a test.
     *
     * @param testName the test name
     * @return duration in nanos.
     */
    public long getDuration(@NotNull final String testName) {
        return tests.getOrDefault(testName, meanForTests).getFirst();
    }
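
    // Note (added for clarity): an unknown test name falls back to the running mean, so a
    // test that has never been recorded still receives a plausible duration, e.g.
    //   tests.getDuration("never.seen.before");  // == getMeanDurationForTests()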

    /**
     * Add test information. Recalculates the mean test duration if the test already exists.
     *
     * @param testName      name of the test
     * @param durationNanos duration
     */
    public void addDuration(@NotNull final String testName, long durationNanos) {
        final Tuple2<Long, Long> current = tests.getOrDefault(testName, new Tuple2<>(0L, 0L));

        tests.put(testName, recalculateMean(current, durationNanos));

        LOG.debug("Recorded test '{}', mean={} ns, runs={}", testName, tests.get(testName).getFirst(), tests.get(testName).getSecond());

        meanForTests = recalculateMean(meanForTests, durationNanos);
    }

    /**
     * Do we have any test information?
     *
     * @return false if no tests info
     */
    public boolean isEmpty() {
        return tests.isEmpty();
    }

    /**
     * How many tests do we have?
     *
     * @return the number of tests we have information for
     */
    public int size() {
        return tests.size();
    }

    /**
     * Return all tests (and their durations) that begin with (or are equal to) `testPrefix`.
     * If none are present we just return the mean test duration so that the test is fairly distributed.
     *
     * @param testPrefix could be just the classname, or the entire classname + testname.
     * @return list of matching tests
     */
    @NotNull
    List<Tuple2<String, Long>> startsWith(@NotNull final String testPrefix) {
        List<Tuple2<String, Long>> results = this.tests.keySet().stream()
                .filter(t -> t.startsWith(testPrefix))
                .map(t -> new Tuple2<>(t, getDuration(t)))
                .collect(Collectors.toList());
        // We don't know if the testPrefix is a classname or classname.methodname (exact match).
        if (results == null || results.isEmpty()) {
            LOG.warn("In {} previously executed tests, could not find any starting with {}", tests.size(), testPrefix);
            results = Arrays.asList(new Tuple2<>(testPrefix, getMeanDurationForTests()));
        }
        return results;
    }

    @NotNull
    List<Tuple2<String, Long>> equals(@NotNull final String testPrefix) {
        List<Tuple2<String, Long>> results = this.tests.keySet().stream()
                .filter(t -> t.equals(testPrefix))
                .map(t -> new Tuple2<>(t, getDuration(t)))
                .collect(Collectors.toList());
        // We don't know if the testPrefix is a classname or classname.methodname (exact match).
        if (results == null || results.isEmpty()) {
            LOG.warn("In {} previously executed tests, could not find any equal to {}", tests.size(), testPrefix);
            results = Arrays.asList(new Tuple2<>(testPrefix, getMeanDurationForTests()));
        }
        return results;
    }
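
    // Illustrative fallback behaviour (added, per the warning branches above): looking up an
    // unknown name returns a single synthetic entry priced at the mean duration, e.g.
    //   tests.startsWith("com.example.Unknown");  // [("com.example.Unknown", mean duration)]
    // so callers can still distribute tests they have no timings for.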

    /**
     * How many times has this test been run? Every call to addDuration increments the current value.
     *
     * @param testName the test name
     * @return the number of times the test name has been run.
     */
    public long getRunCount(@NotNull final String testName) {
        return tests.getOrDefault(testName, new Tuple2<>(0L, 0L)).getSecond();
    }

    /**
     * Return the mean duration for a unit test to run.
     *
     * @return mean duration in nanos.
     */
    public long getMeanDurationForTests() {
        return meanForTests.getFirst();
    }

    /**
     * Clear all tests
     */
    void clear() {
        tests.clear();
        meanForTests = DEFAULT_MEAN_TUPLE;
    }
}
@@ -1,35 +0,0 @@
package net.corda.testing;

import java.io.File;
import java.util.Collection;

public class KubePodResult {

    private final int podIndex;
    private final int resultCode;
    private final File output;
    private final Collection<File> binaryResults;

    public KubePodResult(int podIndex, int resultCode, File output, Collection<File> binaryResults) {
        this.podIndex = podIndex;
        this.resultCode = resultCode;
        this.output = output;
        this.binaryResults = binaryResults;
    }

    public int getResultCode() {
        return resultCode;
    }

    public File getOutput() {
        return output;
    }

    public Collection<File> getBinaryResults() {
        return binaryResults;
    }

    public int getPodIndex() {
        return podIndex;
    }
}
@@ -1,194 +0,0 @@
/*
 * Copyright 2012 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package net.corda.testing;

import org.apache.commons.compress.utils.IOUtils;
import org.gradle.api.DefaultTask;
import org.gradle.api.GradleException;
import org.gradle.api.Transformer;
import org.gradle.api.file.FileCollection;
import org.gradle.api.internal.file.UnionFileCollection;
import org.gradle.api.internal.tasks.testing.junit.result.AggregateTestResultsProvider;
import org.gradle.api.internal.tasks.testing.junit.result.BinaryResultBackedTestResultsProvider;
import org.gradle.api.internal.tasks.testing.junit.result.TestResultsProvider;
import org.gradle.api.internal.tasks.testing.report.DefaultTestReport;
import org.gradle.api.tasks.OutputDirectory;
import org.gradle.api.tasks.TaskAction;
import org.gradle.api.tasks.testing.Test;
import org.gradle.internal.logging.ConsoleRenderer;
import org.gradle.internal.operations.BuildOperationExecutor;

import javax.inject.Inject;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.stream.Collectors;

import static org.gradle.internal.concurrent.CompositeStoppable.stoppable;
import static org.gradle.util.CollectionUtils.collect;

/**
 * Shameful copy of org.gradle.api.tasks.testing.TestReport - modified to handle results from k8s testing.
 * see https://docs.gradle.org/current/dsl/org.gradle.api.tasks.testing.TestReport.html
 */
public class KubesReporting extends DefaultTask {
    private File destinationDir = new File(getProject().getBuildDir(), "test-reporting");
    private List<Object> results = new ArrayList<Object>();
    List<KubePodResult> podResults = new ArrayList<>();
    boolean shouldPrintOutput = true;

    public KubesReporting() {
        //force this task to always run, as it's responsible for parsing exit codes
        getOutputs().upToDateWhen(t -> false);
    }

    @Inject
    protected BuildOperationExecutor getBuildOperationExecutor() {
        throw new UnsupportedOperationException();
    }

    /**
     * Returns the directory to write the HTML report to.
     */
    @OutputDirectory
    public File getDestinationDir() {
        return destinationDir;
    }

    /**
     * Sets the directory to write the HTML report to.
     */
    public void setDestinationDir(File destinationDir) {
        this.destinationDir = destinationDir;
    }

    /**
     * Returns the set of binary test results to include in the report.
     */
    public FileCollection getTestResultDirs() {
        UnionFileCollection dirs = new UnionFileCollection();
        for (Object result : results) {
            addTo(result, dirs);
        }
        return dirs;
    }

    private void addTo(Object result, UnionFileCollection dirs) {
        if (result instanceof Test) {
            Test test = (Test) result;
            dirs.addToUnion(getProject().files(test.getBinResultsDir()).builtBy(test));
        } else if (result instanceof Iterable<?>) {
            Iterable<?> iterable = (Iterable<?>) result;
            for (Object nested : iterable) {
                addTo(nested, dirs);
            }
        } else {
            dirs.addToUnion(getProject().files(result));
        }
    }

    /**
     * Sets the binary test results to use to include in the report. Each entry must point to a binary test results directory generated by a {@link Test}
     * task.
     */
    public void setTestResultDirs(Iterable<File> testResultDirs) {
        this.results.clear();
        reportOn(testResultDirs);
    }

    /**
     * Adds some results to include in the report.
     *
     * <p>This method accepts any parameter of the given types:
     *
     * <ul>
     *
     * <li>A {@link Test} task instance. The results from the test task are included in the report. The test task is automatically added
     * as a dependency of this task.</li>
     *
     * <li>Anything that can be converted to a set of {@link File} instances as per {@link org.gradle.api.Project#files(Object...)}. These must
     * point to the binary test results directory generated by a {@link Test} task instance.</li>
     *
     * <li>An {@link Iterable}. The contents of the iterable are converted recursively.</li>
     *
     * </ul>
     *
     * @param results The result objects.
     */
    public void reportOn(Object... results) {
        for (Object result : results) {
            this.results.add(result);
        }
    }
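
    // Hypothetical build-script usage (added; the task name and path are illustrative only,
    // not from this commit):
    //   task aggregateKubesReport(type: KubesReporting) {
    //       destinationDir = file("$buildDir/test-reporting")
    //       reportOn project.tasks.test  // or any binary test-results directory
    //   }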

    @TaskAction
    void generateReport() {
        TestResultsProvider resultsProvider = createAggregateProvider();
        try {
            if (resultsProvider.isHasResults()) {
                DefaultTestReport testReport = new DefaultTestReport(getBuildOperationExecutor());
                testReport.generateReport(resultsProvider, getDestinationDir());
                List<KubePodResult> containersWithNonZeroReturnCodes = podResults.stream()
                        .filter(result -> result.getResultCode() != 0)
                        .collect(Collectors.toList());

                if (!containersWithNonZeroReturnCodes.isEmpty()) {
                    String reportUrl = new ConsoleRenderer().asClickableFileUrl(new File(destinationDir, "index.html"));
                    if (shouldPrintOutput) {
                        containersWithNonZeroReturnCodes.forEach(podResult -> {
                            try {
                                System.out.println("\n##### CONTAINER " + podResult.getPodIndex() + " OUTPUT START #####");
                                IOUtils.copy(new FileInputStream(podResult.getOutput()), System.out);
                                System.out.println("##### CONTAINER " + podResult.getPodIndex() + " OUTPUT END #####\n");
                            } catch (IOException ignored) {
                            }
                        });
                    }
                    String message = "remote build failed, check test report at " + reportUrl;
                    throw new GradleException(message);
                }
            } else {
                getLogger().info("{} - no binary test results found in dirs: {}.", getPath(), getTestResultDirs().getFiles());
                setDidWork(false);
            }
        } finally {
            stoppable(resultsProvider).stop();
        }
    }

    public TestResultsProvider createAggregateProvider() {
        List<TestResultsProvider> resultsProviders = new LinkedList<TestResultsProvider>();
        try {
            FileCollection resultDirs = getTestResultDirs();
            if (resultDirs.getFiles().size() == 1) {
                return new BinaryResultBackedTestResultsProvider(resultDirs.getSingleFile());
            } else {
                return new AggregateTestResultsProvider(collect(resultDirs, resultsProviders, new Transformer<TestResultsProvider, File>() {
                    public TestResultsProvider transform(File dir) {
                        return new BinaryResultBackedTestResultsProvider(dir);
                    }
                }));
            }
        } catch (RuntimeException e) {
            stoppable(resultsProviders).stop();
            throw e;
        }
    }
}
@@ -1,48 +0,0 @@
package net.corda.testing.retry;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.concurrent.Callable;

public final class Retry {
    private static final Logger log = LoggerFactory.getLogger(Retry.class);

    public interface RetryStrategy {
        <T> T run(Callable<T> op) throws RetryException;
    }

    public static final class RetryException extends RuntimeException {
        public RetryException(String message) {
            super(message);
        }

        public RetryException(String message, Throwable cause) {
            super(message, cause);
        }
    }

    public static RetryStrategy fixed(int times) {
        if (times < 1) throw new IllegalArgumentException();
        return new RetryStrategy() {
            @Override
            public <T> T run(Callable<T> op) {
                int run = 0;
                Exception last = null;
                while (run < times) {
                    try {
                        return op.call();
                    } catch (Exception e) {
                        last = e;
                        log.info("Exception caught: " + e.getMessage());
                    }
                    run++;
                }
                throw new RetryException("Operation failed " + run + " times", last);
            }
        };
    }
}
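
// Minimal usage sketch (added; fetchValue is a hypothetical Callable):
//   String value = Retry.fixed(3).run(() -> fetchValue());
// run() returns the first successful result, or throws RetryException wrapping the last
// failure once all attempts are exhausted.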
@@ -1,43 +0,0 @@
package net.corda.testing;

import org.hamcrest.CoreMatchers;
import org.junit.Assert;
import org.junit.Test;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.IsEqual.equalTo;

public class ListTestsTest {

    @Test
    public void shouldAllocateTests() {

        for (int numberOfTests = 0; numberOfTests < 100; numberOfTests++) {
            for (int numberOfForks = 1; numberOfForks < 100; numberOfForks++) {

                List<String> tests = IntStream.range(0, numberOfTests).boxed()
                        .map(integer -> "Test.method" + integer.toString())
                        .collect(Collectors.toList());
                ListShufflerAndAllocator testLister = new ListShufflerAndAllocator(tests);

                List<String> listOfLists = new ArrayList<>();
                for (int fork = 0; fork < numberOfForks; fork++) {
                    listOfLists.addAll(testLister.getTestsForFork(fork, numberOfForks, 0));
                }

                Assert.assertThat(listOfLists.size(), CoreMatchers.is(tests.size()));
                Assert.assertThat(new HashSet<>(listOfLists).size(), CoreMatchers.is(tests.size()));
                Assert.assertThat(listOfLists.stream().sorted().collect(Collectors.toList()), is(equalTo(tests.stream().sorted().collect(Collectors.toList()))));
            }
        }

    }
}
@@ -1,63 +0,0 @@
package net.corda.testing;

import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

public class PropertiesTest {
    private static String username = "me";
    private static String password = "me";
    private static String cordaType = "corda-project";
    private static String branch = "mine";
    private static String targetBranch = "master";

    @Before
    public void setUp() {
        System.setProperty("git.branch", branch);
        System.setProperty("git.target.branch", targetBranch);
        System.setProperty("artifactory.username", username);
        System.setProperty("artifactory.password", password);
    }

    @After
    public void tearDown() {
        System.setProperty("git.branch", "");
        System.setProperty("git.target.branch", "");
        System.setProperty("artifactory.username", "");
        System.setProperty("artifactory.password", "");
    }

    @Test
    public void cordaType() {
        Properties.setRootProjectType(cordaType);
        Assert.assertEquals(cordaType, Properties.getRootProjectType());
    }

    @Test
    public void getUsername() {
        Assert.assertEquals(username, Properties.getUsername());
    }

    @Test
    public void getPassword() {
        Assert.assertEquals(password, Properties.getPassword());
    }

    @Test
    public void getGitBranch() {
        Assert.assertEquals(branch, Properties.getGitBranch());
    }

    @Test
    public void getTargetGitBranch() {
        Assert.assertEquals(targetBranch, Properties.getTargetGitBranch());
    }

    @Test
    public void getPublishJunitTests() {
        Assert.assertFalse(Properties.getPublishJunitTests());
        System.setProperty("publish.junit", "true");
        Assert.assertTrue(Properties.getPublishJunitTests());
    }
}
@@ -1,323 +0,0 @@
package net.corda.testing;

import groovy.lang.Tuple2;
import org.apache.commons.compress.archivers.ArchiveException;
import org.apache.commons.compress.archivers.ArchiveOutputStream;
import org.apache.commons.compress.archivers.ArchiveStreamFactory;
import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;
import org.jetbrains.annotations.NotNull;
import org.junit.Assert;
import org.junit.Test;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.StringWriter;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

public class TestDurationArtifactsTest {
    final static String CLASSNAME = "FAKE";

    String getXml(List<Tuple2<String, Long>> tests) {
        StringBuilder sb = new StringBuilder();
        sb.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
                "<testsuites disabled=\"\" errors=\"\" failures=\"\" name=\"\" tests=\"\" time=\"\">\n" +
                " <testsuite disabled=\"\" errors=\"\" failures=\"\" hostname=\"\" id=\"\"\n" +
                " name=\"\" package=\"\" skipped=\"\" tests=\"\" time=\"\" timestamp=\"\">\n" +
                " <properties>\n" +
                " <property name=\"\" value=\"\"/>\n" +
                " </properties>\n");

        for (Tuple2<String, Long> test : tests) {
            Double d = ((double) test.getSecond()) / 1_000_000_000.0;
            sb.append(" <testcase assertions=\"\" classname=\"" + CLASSNAME + "\" name=\""
                    + test.getFirst() + "\" status=\"\" time=\"" + d.toString() + "\">\n" +
                    " <skipped/>\n" +
                    " <error message=\"\" type=\"\"/>\n" +
                    " <failure message=\"\" type=\"\"/>\n" +
                    " <system-out/>\n" +
                    " <system-err/>\n" +
                    " </testcase>\n");
        }

        sb.append(" <system-out/>\n" +
                " <system-err/>\n" +
                " </testsuite>\n" +
                "</testsuites>");
        return sb.toString();
    }

    String getXmlWithNoTime(List<Tuple2<String, Long>> tests) {
        StringBuilder sb = new StringBuilder();
        sb.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
                "<testsuites disabled=\"\" errors=\"\" failures=\"\" name=\"\" tests=\"\" time=\"\">\n" +
                " <testsuite disabled=\"\" errors=\"\" failures=\"\" hostname=\"\" id=\"\"\n" +
                " name=\"\" package=\"\" skipped=\"\" tests=\"\" time=\"\" timestamp=\"\">\n" +
                " <properties>\n" +
                " <property name=\"\" value=\"\"/>\n" +
                " </properties>\n");

        for (Tuple2<String, Long> test : tests) {
            Double d = ((double) test.getSecond()) / 1_000_000_000.0;
            sb.append(" <testcase assertions=\"\" classname=\"" + CLASSNAME + "\" name=\""
                    + test.getFirst() + "\" status=\"\" time=\"\">\n" +
                    " <skipped/>\n" +
                    " <error message=\"\" type=\"\"/>\n" +
                    " <failure message=\"\" type=\"\"/>\n" +
                    " <system-out/>\n" +
                    " <system-err/>\n" +
                    " </testcase>\n");
        }

        sb.append(" <system-out/>\n" +
                " <system-err/>\n" +
                " </testsuite>\n" +
                "</testsuites>");
        return sb.toString();
    }

    @Test
    public void fromJunitXml() {
        List<Tuple2<String, Long>> tests = new ArrayList<>();
        tests.add(new Tuple2<>("TEST-A", 111_000_000_000L));
        tests.add(new Tuple2<>("TEST-B", 222_200_000_000L));
        final String xml = getXml(tests);

        List<Tuple2<String, Long>> results
                = TestDurationArtifacts.fromJunitXml(new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8)));

        Assert.assertNotNull(results);

        Assert.assertFalse("Should have results", results.isEmpty());
        Assert.assertEquals(results.size(), 2);
        Assert.assertEquals(CLASSNAME + "." + "TEST-A", results.get(0).getFirst());
        Assert.assertEquals(111_000_000_000L, results.get(0).getSecond().longValue());
        Assert.assertEquals(CLASSNAME + "." + "TEST-B", results.get(1).getFirst());
        Assert.assertEquals(222_200_000_000L, results.get(1).getSecond().longValue());
    }

    @Test
    public void fromJunitXmlWithZeroDuration() {
        // We do return zero values.
        List<Tuple2<String, Long>> tests = new ArrayList<>();
        tests.add(new Tuple2<>("TEST-A", 0L));
        tests.add(new Tuple2<>("TEST-B", 0L));
        final String xml = getXml(tests);

        List<Tuple2<String, Long>> results
                = TestDurationArtifacts.fromJunitXml(new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8)));

        Assert.assertNotNull(results);

        Assert.assertFalse("Should have results", results.isEmpty());
        Assert.assertEquals(results.size(), 2);
        Assert.assertEquals(CLASSNAME + "." + "TEST-A", results.get(0).getFirst());
        Assert.assertEquals(0L, results.get(0).getSecond().longValue());
        Assert.assertEquals(CLASSNAME + "." + "TEST-B", results.get(1).getFirst());
        Assert.assertEquals(0L, results.get(1).getSecond().longValue());
    }

    @Test
    public void fromJunitXmlWithNoDuration() {
        // We do return zero values.
        List<Tuple2<String, Long>> tests = new ArrayList<>();
        tests.add(new Tuple2<>("TEST-A", 0L));
        tests.add(new Tuple2<>("TEST-B", 0L));
        final String xml = getXmlWithNoTime(tests);

        List<Tuple2<String, Long>> results
                = TestDurationArtifacts.fromJunitXml(new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8)));

        Assert.assertNotNull(results);

        Assert.assertFalse("Should have results", results.isEmpty());
        Assert.assertEquals(2, results.size());
        Assert.assertEquals(CLASSNAME + "." + "TEST-A", results.get(0).getFirst());
        Assert.assertEquals(0L, results.get(0).getSecond().longValue());
        Assert.assertEquals(CLASSNAME + "." + "TEST-B", results.get(1).getFirst());
        Assert.assertEquals(0L, results.get(1).getSecond().longValue());
    }

    @Test
    public void canCreateZipFile() throws IOException {
        Tests outputTests = new Tests();
        final String testA = "com.corda.testA";
        final String testB = "com.corda.testB";
        outputTests.addDuration(testA, 55L);
        outputTests.addDuration(testB, 33L);

        StringWriter writer = new StringWriter();
        outputTests.write(writer);
        String csv = writer.toString();

        ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
        try (ZipOutputStream outputStream = new ZipOutputStream(byteStream, StandardCharsets.UTF_8)) {
            ZipEntry entry = new ZipEntry("tests.csv");
            outputStream.putNextEntry(entry);
            outputStream.write(csv.getBytes(StandardCharsets.UTF_8));
            outputStream.closeEntry();
        }
        Assert.assertNotEquals(0, byteStream.toByteArray().length);

        ByteArrayInputStream inputStream = new ByteArrayInputStream(byteStream.toByteArray());
        Tests tests = new Tests();
        Assert.assertTrue(tests.isEmpty());

        TestDurationArtifacts.addTestsFromZippedCsv(tests, inputStream);

        Assert.assertFalse(tests.isEmpty());
        Assert.assertEquals(2, tests.size());
        Assert.assertEquals(55L, tests.getDuration(testA));
        Assert.assertEquals(33L, tests.getDuration(testB));

        Assert.assertEquals(44L, tests.getMeanDurationForTests());
    }

    void putIntoArchive(@NotNull final ArchiveOutputStream outputStream,
                        @NotNull final String fileName,
                        @NotNull final String content) throws IOException {
        ZipArchiveEntry entry = new ZipArchiveEntry(fileName);
        outputStream.putArchiveEntry(entry);
        outputStream.write(content.getBytes(StandardCharsets.UTF_8));
        outputStream.closeArchiveEntry();
    }

    String write(@NotNull final Tests tests) {

        StringWriter writer = new StringWriter();
        tests.write(writer);
        return writer.toString();
    }

    @Test
    public void canCreateZipFileContainingMultipleFiles() throws IOException, ArchiveException {
        // Currently we don't have two csvs in the zip file, but test anyway.

        Tests outputTests = new Tests();
        final String testA = "com.corda.testA";
        final String testB = "com.corda.testB";
        final String testC = "com.corda.testC";
        outputTests.addDuration(testA, 55L);
        outputTests.addDuration(testB, 33L);

        String csv = write(outputTests);

        Tests otherTests = new Tests();
        otherTests.addDuration(testA, 55L);
        otherTests.addDuration(testB, 33L);
        otherTests.addDuration(testC, 22L);
        String otherCsv = write(otherTests);

        ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
        try (ArchiveOutputStream outputStream =
                     new ArchiveStreamFactory("UTF-8").createArchiveOutputStream(ArchiveStreamFactory.ZIP, byteStream)) {
            putIntoArchive(outputStream, "tests1.csv", csv);
            putIntoArchive(outputStream, "tests2.csv", otherCsv);
            outputStream.flush();
        }

        Assert.assertNotEquals(0, byteStream.toByteArray().length);
        ByteArrayInputStream inputStream = new ByteArrayInputStream(byteStream.toByteArray());

        Tests tests = new Tests();
        Assert.assertTrue(tests.isEmpty());

        TestDurationArtifacts.addTestsFromZippedCsv(tests, inputStream);

        Assert.assertFalse(tests.isEmpty());
        Assert.assertEquals(3, tests.size());
        Assert.assertEquals((55 + 33 + 22) / 3, tests.getMeanDurationForTests());
    }

    // // Uncomment to test a file.
    // // Run a build to generate some test files, create a zip:
    // // zip ~/tests.zip $(find . -name "*.xml" -type f | grep test-results)
    //// @Test
    //// public void testZipFile() throws FileNotFoundException {
    ////     File f = new File(System.getProperty("tests.zip", "/tests.zip"));
    ////     List<Tuple2<String, Long>> results = BucketingAllocatorTask.fromZippedXml(new BufferedInputStream(new FileInputStream(f)));
    ////     Assert.assertFalse("Should have results", results.isEmpty());
    ////     System.out.println("Results = " + results.size());
    ////     System.out.println(results.toString());
    //// }

    @Test
    public void branchNamesDoNotHaveDirectoryDelimiters() {
        // we use the branch name in file and artifact tagging, so '/' would confuse things,
        // so make sure when we retrieve the property we strip them out.

        final String expected = "release/os/4.3";
        final String key = "git.branch";
        final String cordaType = "corda";
        Properties.setRootProjectType(cordaType);
        System.setProperty(key, expected);

        Assert.assertEquals(expected, System.getProperty(key));
        Assert.assertNotEquals(expected, Properties.getGitBranch());
        Assert.assertEquals("release-os-4.3", Properties.getGitBranch());
    }

    @Test
    public void getTestsFromArtifactory() {
        String artifactory_password = System.getenv("ARTIFACTORY_PASSWORD");
        String artifactory_username = System.getenv("ARTIFACTORY_USERNAME");
        String git_branch = System.getenv("CORDA_GIT_BRANCH");
        String git_target_branch = System.getenv("CORDA_GIT_TARGET_BRANCH");

        if (artifactory_password == null ||
                artifactory_username == null ||
                git_branch == null ||
                git_target_branch == null
        ) {
            System.out.println("Skipping test - set env vars to run this test");
            return;
        }

        System.setProperty("git.branch", git_branch);
        System.setProperty("git.target.branch", git_target_branch);
        System.setProperty("artifactory.password", artifactory_password);
        System.setProperty("artifactory.username", artifactory_username);
        Assert.assertTrue(TestDurationArtifacts.tests.isEmpty());
        TestDurationArtifacts.loadTests();
        Assert.assertFalse(TestDurationArtifacts.tests.isEmpty());
    }

    @Test
    public void tryAndWalkForTestXmlFiles() {
        final String xmlRoot = System.getenv("JUNIT_XML_ROOT");
        if (xmlRoot == null) {
            System.out.println("Set JUNIT_XML_ROOT to run this test");
            return;
        }

        List<Path> testXmlFiles = TestDurationArtifacts.getTestXmlFiles(Paths.get(xmlRoot));
        Assert.assertFalse(testXmlFiles.isEmpty());

        for (Path testXmlFile : testXmlFiles.stream().sorted().collect(Collectors.toList())) {
            // System.out.println(testXmlFile.toString());
        }

        System.out.println("\n\nTESTS\n\n");
        for (Path testResult : testXmlFiles) {
            try {
                final List<Tuple2<String, Long>> unitTests = TestDurationArtifacts.fromJunitXml(new FileInputStream(testResult.toFile()));
                for (Tuple2<String, Long> unitTest : unitTests) {
                    System.out.println(unitTest.getFirst() + " --> " + BucketingAllocator.getDuration(unitTest.getSecond()));
                }

            } catch (FileNotFoundException e) {
                e.printStackTrace();
            }
        }
    }
}
@@ -1,145 +0,0 @@
package net.corda.testing;

import org.junit.Assert;
import org.junit.Test;

import java.io.StringReader;
import java.io.StringWriter;

public class TestsTest {
    @Test
    public void read() {
        final Tests tests = new Tests();
        Assert.assertTrue(tests.isEmpty());

        final String s = Tests.TEST_NAME + "," + Tests.MEAN_DURATION_NANOS + "," + Tests.NUMBER_OF_RUNS + '\n'
                + "hello,100,4\n";
        tests.addTests(Tests.read(new StringReader(s)));

        Assert.assertFalse(tests.isEmpty());
        Assert.assertEquals((long) tests.getDuration("hello"), 100);
    }

    @Test
    public void write() {
        final StringWriter writer = new StringWriter();
        final Tests tests = new Tests();
        Assert.assertTrue(tests.isEmpty());
        tests.addDuration("hello", 100);
        tests.write(writer);
        Assert.assertFalse(tests.isEmpty());

        final StringReader reader = new StringReader(writer.toString());
        final Tests otherTests = new Tests();
        otherTests.addTests(Tests.read(reader));

        Assert.assertFalse(tests.isEmpty());
        Assert.assertFalse(otherTests.isEmpty());
        Assert.assertEquals(tests.size(), otherTests.size());
        Assert.assertEquals(tests.getDuration("hello"), otherTests.getDuration("hello"));
    }

    @Test
    public void addingTestChangesMeanDuration() {
        final Tests tests = new Tests();
        final String s = Tests.TEST_NAME + "," + Tests.MEAN_DURATION_NANOS + "," + Tests.NUMBER_OF_RUNS + '\n'
                + "hello,100,4\n";
        tests.addTests(Tests.read(new StringReader(s)));

        Assert.assertFalse(tests.isEmpty());
        // 400 total for 4 tests
        Assert.assertEquals((long) tests.getDuration("hello"), 100);

        // 1000 total for 5 tests = 200 mean
        tests.addDuration("hello", 600);
        Assert.assertEquals((long) tests.getDuration("hello"), 200);
    }

    @Test
    public void addTests() {
        final Tests tests = new Tests();
        Assert.assertTrue(tests.isEmpty());

        final String s = Tests.TEST_NAME + "," + Tests.MEAN_DURATION_NANOS + "," + Tests.NUMBER_OF_RUNS + '\n'
                + "hello,100,4\n"
                + "goodbye,200,4\n";

        tests.addTests(Tests.read(new StringReader(s)));
        Assert.assertFalse(tests.isEmpty());
        Assert.assertEquals(tests.size(), 2);
    }

    @Test
    public void getDuration() {
        final Tests tests = new Tests();
        Assert.assertTrue(tests.isEmpty());

        final String s = Tests.TEST_NAME + "," + Tests.MEAN_DURATION_NANOS + "," + Tests.NUMBER_OF_RUNS + '\n'
                + "hello,100,4\n"
                + "goodbye,200,4\n";

        tests.addTests(Tests.read(new StringReader(s)));
        Assert.assertFalse(tests.isEmpty());
        Assert.assertEquals(tests.size(), 2);

        Assert.assertEquals(100L, tests.getDuration("hello"));
        Assert.assertEquals(200L, tests.getDuration("goodbye"));
    }

    @Test
    public void addTestInfo() {
        final Tests tests = new Tests();
        Assert.assertTrue(tests.isEmpty());

        final String s = Tests.TEST_NAME + "," + Tests.MEAN_DURATION_NANOS + "," + Tests.NUMBER_OF_RUNS + '\n'
                + "hello,100,4\n"
                + "goodbye,200,4\n";

        tests.addTests(Tests.read(new StringReader(s)));
        Assert.assertFalse(tests.isEmpty());
        Assert.assertEquals(2, tests.size());

        tests.addDuration("foo", 55);
        tests.addDuration("bar", 33);
        Assert.assertEquals(4, tests.size());

        tests.addDuration("bar", 56);
        Assert.assertEquals(4, tests.size());
    }

    @Test
    public void addingNewDurationUpdatesRunCount() {

        final Tests tests = new Tests();
        Assert.assertTrue(tests.isEmpty());

        final String s = Tests.TEST_NAME + "," + Tests.MEAN_DURATION_NANOS + "," + Tests.NUMBER_OF_RUNS + '\n'
                + "hello,100,4\n"
                + "goodbye,200,4\n";

        tests.addTests(Tests.read(new StringReader(s)));
        Assert.assertFalse(tests.isEmpty());
        Assert.assertEquals(2, tests.size());

        tests.addDuration("foo", 55);

        Assert.assertEquals(0, tests.getRunCount("bar"));

        tests.addDuration("bar", 33);
        Assert.assertEquals(4, tests.size());

        tests.addDuration("bar", 56);
        Assert.assertEquals(2, tests.getRunCount("bar"));
        Assert.assertEquals(4, tests.size());

        tests.addDuration("bar", 56);
        tests.addDuration("bar", 56);
        Assert.assertEquals(4, tests.getRunCount("bar"));

        Assert.assertEquals(4, tests.getRunCount("hello"));
        tests.addDuration("hello", 22);
        tests.addDuration("hello", 22);
        tests.addDuration("hello", 22);
        Assert.assertEquals(7, tests.getRunCount("hello"));
    }
}
@@ -1,178 +0,0 @@
package net.corda.testing;

import org.hamcrest.collection.IsIterableContainingInAnyOrder;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;

import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static org.hamcrest.CoreMatchers.is;

public class BucketingAllocatorTest {

    @Test
    public void shouldAlwaysBucketTestsEvenIfNotInTimedFile() {
        Tests tests = new Tests();
        BucketingAllocator bucketingAllocator = new BucketingAllocator(1, () -> tests);

        Object task = new Object();
        bucketingAllocator.addSource(() -> Arrays.asList("SomeTestingClass", "AnotherTestingClass"), task);

        bucketingAllocator.generateTestPlan();
        List<String> testsForForkAndTestTask = bucketingAllocator.getTestsForForkAndTestTask(0, task);

        Assert.assertThat(testsForForkAndTestTask, IsIterableContainingInAnyOrder.containsInAnyOrder("SomeTestingClass", "AnotherTestingClass"));

        List<BucketingAllocator.TestsForForkContainer> forkContainers = bucketingAllocator.getForkContainers();
        Assert.assertEquals(1, forkContainers.size());
        // There aren't any known tests, so it will use the default instead.
        Assert.assertEquals(Tests.DEFAULT_MEAN_NANOS, tests.getMeanDurationForTests());
        Assert.assertEquals(2 * tests.getMeanDurationForTests(), forkContainers.get(0).getCurrentDuration().longValue());
    }

    @Test
    public void shouldAlwaysBucketTestsEvenIfNotInTimedFileAndUseMeanValue() {
        final Tests tests = new Tests();
        tests.addDuration("someRandomTestNameToForceMeanValue", 1_000_000_000);

        BucketingAllocator bucketingAllocator = new BucketingAllocator(1, () -> tests);

        Object task = new Object();
        List<String> testNames = Arrays.asList("SomeTestingClass", "AnotherTestingClass");

        bucketingAllocator.addSource(() -> testNames, task);

        bucketingAllocator.generateTestPlan();
        List<String> testsForForkAndTestTask = bucketingAllocator.getTestsForForkAndTestTask(0, task);

        Assert.assertThat(testsForForkAndTestTask, IsIterableContainingInAnyOrder.containsInAnyOrder(testNames.toArray()));

        List<BucketingAllocator.TestsForForkContainer> forkContainers = bucketingAllocator.getForkContainers();
        Assert.assertEquals(1, forkContainers.size());
        Assert.assertEquals(testNames.size() * tests.getMeanDurationForTests(), forkContainers.get(0).getCurrentDuration().longValue());
    }

    @Test
    public void shouldAllocateTestsAcrossForksEvenIfNoMatchingTestsFound() {
        Tests tests = new Tests();
        tests.addDuration("SomeTestingClass", 1_000_000_000);
        tests.addDuration("AnotherTestingClass", 2222);
        BucketingAllocator bucketingAllocator = new BucketingAllocator(2, () -> tests);

        Object task = new Object();
        bucketingAllocator.addSource(() -> Arrays.asList("SomeTestingClass", "AnotherTestingClass"), task);

        bucketingAllocator.generateTestPlan();
        List<String> testsForForkOneAndTestTask = bucketingAllocator.getTestsForForkAndTestTask(0, task);
        List<String> testsForForkTwoAndTestTask = bucketingAllocator.getTestsForForkAndTestTask(1, task);

        Assert.assertThat(testsForForkOneAndTestTask.size(), is(1));
        Assert.assertThat(testsForForkTwoAndTestTask.size(), is(1));

        List<String> allTests = Stream.of(testsForForkOneAndTestTask, testsForForkTwoAndTestTask).flatMap(Collection::stream).collect(Collectors.toList());

        Assert.assertThat(allTests, IsIterableContainingInAnyOrder.containsInAnyOrder("SomeTestingClass", "AnotherTestingClass"));
    }

    @Test
    public void shouldAllocateTestsAcrossForksEvenIfNoMatchingTestsFoundAndUseExisitingValues() {
        Tests tests = new Tests();
        tests.addDuration("SomeTestingClass", 1_000_000_000L);
        tests.addDuration("AnotherTestingClass", 3_000_000_000L);
        BucketingAllocator bucketingAllocator = new BucketingAllocator(2, () -> tests);

        Object task = new Object();
        bucketingAllocator.addSource(() -> Arrays.asList("YetAnotherTestingClass", "SomeTestingClass", "AnotherTestingClass"), task);

        bucketingAllocator.generateTestPlan();
        List<String> testsForForkOneAndTestTask = bucketingAllocator.getTestsForForkAndTestTask(0, task);
        List<String> testsForForkTwoAndTestTask = bucketingAllocator.getTestsForForkAndTestTask(1, task);

        Assert.assertThat(testsForForkOneAndTestTask.size(), is(1));
        Assert.assertThat(testsForForkTwoAndTestTask.size(), is(2));

        List<String> allTests = Stream.of(testsForForkOneAndTestTask, testsForForkTwoAndTestTask).flatMap(Collection::stream).collect(Collectors.toList());

        Assert.assertThat(allTests, IsIterableContainingInAnyOrder.containsInAnyOrder("YetAnotherTestingClass", "SomeTestingClass", "AnotherTestingClass"));

        List<BucketingAllocator.TestsForForkContainer> forkContainers = bucketingAllocator.getForkContainers();
        Assert.assertEquals(2, forkContainers.size());
        // Internally, we should have sorted the tests by decreasing size, so the largest would be added to the first bucket.
        Assert.assertEquals(TimeUnit.SECONDS.toNanos(3), forkContainers.get(0).getCurrentDuration().longValue());

        // At this point, the second bucket is empty. We also know that the test average is 2s (1+3)/2.
        // So we should put SomeTestingClass (1s) into this bucket, AND then put the 'unknown' test 'YetAnotherTestingClass'
        // into this bucket, using the mean duration = 2s, resulting in 3s.
        Assert.assertEquals(TimeUnit.SECONDS.toNanos(3), forkContainers.get(1).getCurrentDuration().longValue());
    }

    @Test
    public void testBucketAllocationForSeveralTestsDistributedByClassName() {
        Tests tests = new Tests();
        tests.addDuration("SmallTestingClass", 1_000_000_000L);
        tests.addDuration("LargeTestingClass", 3_000_000_000L);
        tests.addDuration("MediumTestingClass", 2_000_000_000L);
        // Gives a nice mean of 2s.
        Assert.assertEquals(TimeUnit.SECONDS.toNanos(2), tests.getMeanDurationForTests());

        BucketingAllocator bucketingAllocator = new BucketingAllocator(4, () -> tests);

        List<String> testNames = Arrays.asList(
                "EvenMoreTestingClass",
                "YetAnotherTestingClass",
                "AndYetAnotherTestingClass",
                "OhYesAnotherTestingClass",
                "MediumTestingClass",
                "SmallTestingClass",
                "LargeTestingClass");

        Object task = new Object();
        bucketingAllocator.addSource(() -> testNames, task);

        // does not preserve order of known tests and unknown tests....
        bucketingAllocator.generateTestPlan();

        List<String> testsForFork0 = bucketingAllocator.getTestsForForkAndTestTask(0, task);
        List<String> testsForFork1 = bucketingAllocator.getTestsForForkAndTestTask(1, task);
        List<String> testsForFork2 = bucketingAllocator.getTestsForForkAndTestTask(2, task);
        List<String> testsForFork3 = bucketingAllocator.getTestsForForkAndTestTask(3, task);

        Assert.assertThat(testsForFork0.size(), is(1));
        Assert.assertThat(testsForFork1.size(), is(2));
        Assert.assertThat(testsForFork2.size(), is(2));
        Assert.assertThat(testsForFork3.size(), is(2));

        // This must be true as it is the largest value.
        Assert.assertTrue(testsForFork0.contains("LargeTestingClass"));

        List<String> allTests = Stream.of(testsForFork0, testsForFork1, testsForFork2, testsForFork3)
                .flatMap(Collection::stream).collect(Collectors.toList());

        Assert.assertThat(allTests, IsIterableContainingInAnyOrder.containsInAnyOrder(testNames.toArray()));

        List<BucketingAllocator.TestsForForkContainer> forkContainers = bucketingAllocator.getForkContainers();
        Assert.assertEquals(4, forkContainers.size());

        long totalDuration = forkContainers.stream().mapToLong(c -> c.getCurrentDuration()).sum();
        Assert.assertEquals(tests.getMeanDurationForTests() * testNames.size(), totalDuration);

        Assert.assertEquals(TimeUnit.SECONDS.toNanos(3), forkContainers.get(0).getCurrentDuration().longValue());
        Assert.assertEquals(TimeUnit.SECONDS.toNanos(4), forkContainers.get(1).getCurrentDuration().longValue());
        Assert.assertEquals(TimeUnit.SECONDS.toNanos(4), forkContainers.get(2).getCurrentDuration().longValue());
        Assert.assertEquals(TimeUnit.SECONDS.toNanos(3), forkContainers.get(3).getCurrentDuration().longValue());
    }

    @Test
    public void durationToString() {
        Assert.assertEquals("1 mins", BucketingAllocator.getDuration(60_000_000_000L));
        Assert.assertEquals("4 secs", BucketingAllocator.getDuration(4_000_000_000L));
        Assert.assertEquals("400 ms", BucketingAllocator.getDuration(400_000_000L));
        Assert.assertEquals("400000 ns", BucketingAllocator.getDuration(400_000L));
    }
}