diff --git a/.idea/compiler.xml b/.idea/compiler.xml
index 03aafa00c9..e16d02f15e 100644
--- a/.idea/compiler.xml
+++ b/.idea/compiler.xml
@@ -7,6 +7,8 @@
+
+
diff --git a/config/dev/log4j2.xml b/config/dev/log4j2.xml
index c3da2f3636..d2394a238d 100644
--- a/config/dev/log4j2.xml
+++ b/config/dev/log4j2.xml
@@ -12,11 +12,10 @@
- logs
+ ${sys:log-path:-logs}
node-${hostName}
- ${sys:log-path}/archive
- error
- info
+ ${log-path}/archive
+ ${sys:defaultLogLevel:-info}
@@ -24,7 +23,7 @@
-
+
-
+
@@ -49,7 +48,7 @@
@@ -74,15 +73,23 @@
-
-
-
+
+
+
+
+
+
+
+
+
+
+
diff --git a/docs/source/blob-inspector.rst b/docs/source/blob-inspector.rst
index ba301eef0a..1742ee8771 100644
--- a/docs/source/blob-inspector.rst
+++ b/docs/source/blob-inspector.rst
@@ -14,8 +14,12 @@ To run simply pass in the file or URL as the first parameter:
Use the ``--help`` flag for a full list of command line options.
-``SerializedBytes``
-~~~~~~~~~~~~~~~~~~
+When inspecting your custom data structures, there's no need to include the jars containing their class definitions on the
+classpath. The blob inspector (or rather, the serialization framework) is able to synthesise any classes found in the
+blob that aren't on the classpath.
+
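+For example, assuming the tool jar is named ``blob-inspector.jar`` (the jar name and input file below are illustrative),
+a blob containing custom CorDapp classes can be inspected without those CorDapp jars on the classpath::
+
+    java -jar blob-inspector.jar my-custom-states.blob --format=JSON
+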
+SerializedBytes
+~~~~~~~~~~~~~~~
One thing to note is that the binary blob may contain embedded ``SerializedBytes`` objects. Rather than printing these
out as a Base64 string, the blob inspector will first materialise them into Java objects and then output those. You will
@@ -23,41 +27,41 @@ see this when dealing with classes such as ``SignedData`` or other structures th
``nodeInfo-*`` files or the ``network-parameters`` file in the node's directory. For example, the output of a node-info
file may look like:
-.. container:: codeset
- .. sourcecode:: yaml
+**-\\-format=YAML**
+::
- net.corda.nodeapi.internal.SignedNodeInfo
- ---
- raw:
- class: "net.corda.core.node.NodeInfo"
- deserialized:
- addresses:
- - "localhost:10011"
- legalIdentitiesAndCerts:
- - "O=BankOfCorda, L=New York, C=US"
- platformVersion: 4
- serial: 1527074180971
- signatures:
- - !!binary |
- dmoAnnzcv0MzRN+3ZSCDcCJIAbXnoYy5mFWB3Nijndzu/dzIoYdIawINXbNSY/5z2XloDK01vZRV
- TreFZCbZAg==
+ net.corda.nodeapi.internal.SignedNodeInfo
+ ---
+ raw:
+ class: "net.corda.core.node.NodeInfo"
+ deserialized:
+ addresses:
+ - "localhost:10005"
+ legalIdentitiesAndCerts:
+ - "O=BankOfCorda, L=London, C=GB"
+ platformVersion: 4
+ serial: 1527851068715
+ signatures:
+ - !!binary |-
+ VFRy4frbgRDbCpK1Vo88PyUoj01vbRnMR3ROR2abTFk7yJ14901aeScX/CiEP+CDGiMRsdw01cXt\nhKSobAY7Dw==
- .. sourcecode:: json
+**-\\-format=JSON**
+::
- net.corda.nodeapi.internal.SignedNodeInfo
- {
- "raw" : {
- "class" : "net.corda.core.node.NodeInfo",
- "deserialized" : {
- "addresses" : [ "localhost:10011" ],
- "legalIdentitiesAndCerts" : [ "O=BankOfCorda, L=New York, C=US" ],
- "platformVersion" : 4,
- "serial" : 1527074180971
- }
- },
- "signatures" : [ "dmoAnnzcv0MzRN+3ZSCDcCJIAbXnoYy5mFWB3Nijndzu/dzIoYdIawINXbNSY/5z2XloDK01vZRVTreFZCbZAg==" ]
+ net.corda.nodeapi.internal.SignedNodeInfo
+ {
+ "raw" : {
+ "class" : "net.corda.core.node.NodeInfo",
+ "deserialized" : {
+ "addresses" : [ "localhost:10005" ],
+ "legalIdentitiesAndCerts" : [ "O=BankOfCorda, L=London, C=GB" ],
+ "platformVersion" : 4,
+ "serial" : 1527851068715
}
+ },
+ "signatures" : [ "VFRy4frbgRDbCpK1Vo88PyUoj01vbRnMR3ROR2abTFk7yJ14901aeScX/CiEP+CDGiMRsdw01cXthKSobAY7Dw==" ]
+ }
Notice the file is actually a serialised ``SignedNodeInfo`` object, which has a ``raw`` property of type ``SerializedBytes``.
This property is materialised into a ``NodeInfo`` and is output under the ``deserialized`` field.
diff --git a/docs/source/changelog.rst b/docs/source/changelog.rst
index 2ae98a62b5..107045daed 100644
--- a/docs/source/changelog.rst
+++ b/docs/source/changelog.rst
@@ -8,6 +8,8 @@ Unreleased
==========
* Introduced a hierarchy of ``DatabaseMigrationException``s, allowing ``NodeStartup`` to gracefully inform users of problems related to database migrations before exiting with a non-zero code.
+* Fixed an issue preventing out-of-process nodes started by the ``Driver`` from logging to file.
+
* Fixed an issue with ``CashException`` not being able to deserialise after the introduction of AMQP for RPC.
* Removed the -Xmx VM argument from Explorer's Capsule setup. This helps avoid out-of-memory errors.
@@ -120,6 +122,8 @@ Unreleased
For instance, this method will check if an ECC key lies on a valid curve or if an RSA key is >= 2048 bits. This might
be required for extra key validation checks, e.g., for Doorman to check that a CSR key meets the minimum security requirements.
+* The misspelt table name ``NODE_ATTCHMENTS_CONTRACTS`` has been corrected to ``NODE_ATTACHMENTS_CONTRACTS``.
+
.. _changelog_v3.1:
Version 3.1
diff --git a/docs/source/upgrade-notes.rst b/docs/source/upgrade-notes.rst
index 9446b80c4f..afe4bb725a 100644
--- a/docs/source/upgrade-notes.rst
+++ b/docs/source/upgrade-notes.rst
@@ -68,6 +68,12 @@ UNRELEASED
No action is needed for default node tables, as ``PersistentStateRef`` is used as a Primary Key only and the backing columns are automatically non-nullable,
nor for custom CorDapp entities that use ``PersistentStateRef`` as a Primary Key.
+* H2 database upgrade - the misspelt table name has been corrected. For each database instance and schema, run the following SQL statement:
+
+   ALTER TABLE [schema].NODE_ATTCHMENTS_CONTRACTS RENAME TO NODE_ATTACHMENTS_CONTRACTS;
+
+  The schema prefix is optional. Run the statement while the node is not running.
+
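+  On an H2 node database the statement can be executed with H2's ``Shell`` tool while the node is stopped; the jar name,
+  database path and credentials below are placeholders for your own setup::
+
+     java -cp h2-*.jar org.h2.tools.Shell -url "jdbc:h2:file:/path/to/node/persistence" -user sa -password "" \
+          -sql "ALTER TABLE NODE_ATTCHMENTS_CONTRACTS RENAME TO NODE_ATTACHMENTS_CONTRACTS;"
+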
Upgrading to Corda Enterprise 3.0 Developer Preview
---------------------------------------------------
A prerequisite to upgrade to Corda Enterprise 3.0 is to ensure your CorDapp is upgraded to Open Source Corda V3.x.
diff --git a/experimental/avalanche/Readme.md b/experimental/avalanche/Readme.md
new file mode 100644
index 0000000000..e41775f225
--- /dev/null
+++ b/experimental/avalanche/Readme.md
@@ -0,0 +1,24 @@
+# Avalanche Simulation
+
+Experimental simulation of the Avalanche protocol by Team Rocket. This
+implementation is incomplete.
+
+The paper: [Snowflake to Avalanche: A Novel Metastable Consensus Protocol Family for
+ Cryptocurrencies](https://ipfs.io/ipfs/QmUy4jh5mGNZvLkjies1RWM4YuvJh5o2FYopNPVYwrRVGV).
+
+## Running the Simulation
+```
+./gradlew shadowJar
+java -jar build/libs/avalanche-all.jar --dump-dags
+```
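+
+The available flags are defined in `Parameters.kt`; an example run with non-default settings (illustrative values):
+```
+java -jar build/libs/avalanche-all.jar --num-transactions 50 --double-spend-ratio 0.05 --dump-dags
+```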
+
+### Visualising the DAGs
+```
+for f in node-0-*.dot; do dot -Tpng -O $f; done
+```
+The above command generates a number of PNG files `node-0-*.png` showing the
+evolution of the DAG. Each node is labelled with the ID of the spent state,
+the chit and the confidence value. The preferred transaction of a conflict set
+is labelled with a star. Accepted transactions are blue.
+
+![DAG](./images/node-0-003.dot.png)
diff --git a/experimental/avalanche/build.gradle b/experimental/avalanche/build.gradle
new file mode 100644
index 0000000000..ea6e2a4632
--- /dev/null
+++ b/experimental/avalanche/build.gradle
@@ -0,0 +1,42 @@
+buildscript {
+ ext.kotlin_version = '1.2.40'
+
+ repositories {
+ mavenCentral()
+ jcenter()
+ }
+
+ dependencies {
+ classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
+ classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.3'
+ }
+}
+
+plugins {
+ id "org.jetbrains.kotlin.jvm"
+ id 'com.github.johnrengelman.shadow' version '2.0.3'
+ id 'java'
+ id 'application'
+}
+repositories {
+ mavenCentral()
+}
+dependencies {
+ compile "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlin_version"
+ compile group: 'info.picocli', name: 'picocli', version: '3.0.1'
+ testCompile group: 'junit', name: 'junit', version: '4.12'
+}
+compileKotlin {
+ kotlinOptions {
+ jvmTarget = "1.8"
+ }
+}
+compileTestKotlin {
+ kotlinOptions {
+ jvmTarget = "1.8"
+ }
+}
+mainClassName = "net.corda.avalanche.MainKt"
+shadowJar {
+ baseName = "avalanche"
+}
diff --git a/experimental/avalanche/images/node-0-003.dot.png b/experimental/avalanche/images/node-0-003.dot.png
new file mode 100644
index 0000000000..65c8bf911a
Binary files /dev/null and b/experimental/avalanche/images/node-0-003.dot.png differ
diff --git a/experimental/avalanche/src/main/kotlin/net/corda/avalanche/Main.kt b/experimental/avalanche/src/main/kotlin/net/corda/avalanche/Main.kt
new file mode 100644
index 0000000000..3d265d78e7
--- /dev/null
+++ b/experimental/avalanche/src/main/kotlin/net/corda/avalanche/Main.kt
@@ -0,0 +1,231 @@
+package net.corda.avalanche
+
+import picocli.CommandLine
+import java.io.File
+import java.util.*
+import kotlin.collections.LinkedHashMap
+
+fun main(args: Array<String>) {
+
+ val parameters = Parameters()
+ CommandLine(parameters).parse(*args)
+ if (parameters.helpRequested) {
+ CommandLine.usage(Parameters(), System.out)
+ return
+ }
+
+ val network = Network(parameters)
+ val n1 = network.nodes[0]
+ val c1 = mutableListOf<Transaction>()
+ val c2 = mutableListOf<Transaction>()
+
+ repeat(parameters.nrTransactions) {
+ val n = network.nodes.shuffled(network.rng).first()
+ c1.add(n.onGenerateTx(it))
+ if (network.rng.nextDouble() < parameters.doubleSpendRatio) {
+ val d = network.rng.nextInt(it)
+ println("double spend of $d")
+ val n2 = network.nodes.shuffled(network.rng).first()
+ c2.add(n2.onGenerateTx(d))
+ }
+
+ network.run()
+
+ if (parameters.dumpDags) {
+ n1.dumpDag(File("node-0-${String.format("%03d", it)}.dot"))
+ }
+ println("$it: " + String.format("%.3f", fractionAccepted(n1)))
+ }
+
+ val conflictSets = (c1 + c2).groupBy { it.data }.filterValues { it.size > 1 }
+ conflictSets.forEach { v, txs ->
+ val acceptance = txs.map { t -> network.nodes.map { it.isAccepted(t) }.any { it } }
+ require(acceptance.filter { it }.size < 2) { "More than one transaction of the conflict set of $v got accepted." }
+ }
+}
+
+fun fractionAccepted(n: Node): Double {
+ val accepted = n.transactions.values.filter { n.isAccepted(it) }.size
+ return accepted.toDouble() / n.transactions.size
+}
+
+data class Transaction(
+ val id: UUID,
+ val data: Int,
+ val parents: List<UUID>,
+ var chit: Int = 0,
+ var confidence: Int = 0) {
+ override fun toString(): String {
+ return "T(id=${id.toString().take(5)}, data=$data, parents=${parents.map { it.toString().take(5) }}, chit=$chit, confidence=$confidence)"
+ }
+}
+
+data class ConflictSet(
+ var pref: Transaction,
+ var last: Transaction,
+ var count: Int,
+ var size: Int
+)
+
+class Network(val parameters: Parameters) {
+ val rng = Random(parameters.seed)
+ val tx = Transaction(UUID.randomUUID(), -1, emptyList(), 1)
+ val nodes = (0..parameters.nrNodes).map { Node(it, parameters, tx.copy(), this, rng) }
+ fun run() {
+ nodes.forEach { it.avalancheLoop() }
+ }
+}
+
+class Node(val id: Int, parameters: Parameters, val genesisTx: Transaction, val network: Network, val rng: Random) {
+
+ val alpha = parameters.alpha
+ val k = parameters.k
+ val beta1 = parameters.beta1
+ val beta2 = parameters.beta2
+
+ val transactions = LinkedHashMap(mapOf(genesisTx.id to genesisTx))
+ val queried = mutableSetOf(genesisTx.id)
+ val conflicts = mutableMapOf(genesisTx.data to ConflictSet(genesisTx, genesisTx, 0, 1))
+
+ val accepted = mutableSetOf(genesisTx.id)
+ val parentSets = mutableMapOf<UUID, Set<Transaction>>()
+
+ fun onGenerateTx(data: Int): Transaction {
+ val edges = parentSelection()
+ val t = Transaction(UUID.randomUUID(), data, edges.map { it.id })
+ onReceiveTx(this, t)
+ return t
+ }
+
+ fun onReceiveTx(sender: Node, tx: Transaction) {
+ if (transactions.contains(tx.id)) return
+ tx.chit = 0
+ tx.confidence = 0
+
+ tx.parents.forEach {
+ if (!transactions.contains(it)) {
+ val t = sender.onSendTx(it)
+ onReceiveTx(sender, t)
+ }
+ }
+
+ if (!conflicts.contains(tx.data)) {
+ conflicts[tx.data] = ConflictSet(tx, tx, 0, 1)
+ } else {
+ conflicts[tx.data]!!.size++
+ }
+
+ transactions[tx.id] = tx
+ }
+
+ fun onSendTx(id: UUID): Transaction {
+ return transactions[id]!!.copy()
+ }
+
+ fun onQuery(sender: Node, tx: Transaction): Int {
+ onReceiveTx(sender, tx)
+ return if (isStronglyPreferred(tx)) 1
+ else 0
+ }
+
+ fun avalancheLoop() {
+ val txs = transactions.values.filterNot { queried.contains(it.id) }
+ txs.forEach { tx ->
+ val sample = network.nodes.filterNot { it == this }.shuffled(rng).take(k)
+ val res = sample.map {
+ val txCopy = tx.copy()
+ it.onQuery(this, txCopy)
+ }.sum()
+ if (res >= alpha * k) {
+ tx.chit = 1
+ // Update the preference for ancestors.
+ parentSet(tx).forEach { p ->
+ p.confidence += 1
+ }
+ parentSet(tx).forEach { p->
+ val cs = conflicts[p.data]!!
+ if (p.confidence > cs.pref.confidence) {
+ cs.pref = p
+ }
+ if (p != cs.last) {
+ cs.last = p
+ cs.count = 0
+ } else {
+ cs.count++
+ }
+ }
+ }
+ queried.add(tx.id)
+ }
+ }
+
+ fun isPreferred(tx: Transaction): Boolean {
+ return conflicts[tx.data]!!.pref == tx
+ }
+
+ fun isStronglyPreferred(tx: Transaction): Boolean {
+ return parentSet(tx).map { isPreferred(it) }.all { it }
+ }
+
+ fun isAccepted(tx: Transaction): Boolean {
+ if (accepted.contains(tx.id)) return true
+ if (!queried.contains(tx.id)) return false
+
+ val cs = conflicts[tx.data]!!
+ val parentsAccepted = tx.parents.map { accepted.contains(it) }.all { it }
+ val isAccepted = (parentsAccepted && cs.size == 1 && tx.confidence > beta1) ||
+ (cs.pref == tx && cs.count > beta2)
+ if (isAccepted) accepted.add(tx.id)
+ return isAccepted
+ }
+
+ fun parentSet(tx: Transaction): Set<Transaction> {
+
+ if (parentSets.contains(tx.id)) return parentSets[tx.id]!!
+
+ val parents = mutableSetOf<Transaction>()
+ var ps = tx.parents.toSet()
+ while (ps.isNotEmpty()) {
+ ps.forEach {
+ if (transactions.contains(it)) parents.add(transactions[it]!!)
+ }
+ ps = ps.flatMap {
+ if (transactions.contains(it)) {
+ transactions[it]!!.parents
+ } else {
+ emptyList()
+ }
+ }.toSet()
+ }
+ parentSets[tx.id] = parents
+ return parents
+ }
+
+ fun parentSelection(): List<Transaction> {
+ val eps0 = transactions.values.filter { isStronglyPreferred(it) }
+ val eps1 = eps0.filter { conflicts[it.data]!!.size == 1 || it.confidence > 0 }
+ val parents = eps1.flatMap { parentSet(it) }.toSet().filterNot { eps1.contains(it) }
+ val fallback = if (transactions.size == 1) listOf(genesisTx)
+ else transactions.values.reversed().take(10).filter { !isAccepted(it) && conflicts[it.data]!!.size == 1 }.shuffled(network.rng).take(3)
+ require(parents.isNotEmpty() || fallback.isNotEmpty()) { "Unable to select parents." }
+ return if (parents.isEmpty()) fallback else parents
+ }
+
+ fun dumpDag(f: File) {
+ f.printWriter().use { out ->
+ out.println("digraph G {")
+ transactions.values.forEach {
+ val color = if (isAccepted(it)) "color=lightblue; style=filled;" else ""
+ val pref = if (conflicts[it.data]!!.size > 1 && isPreferred(it)) "*" else ""
+ val chit = if (queried.contains(it.id)) it.chit.toString() else "?"
+ out.println("\"${it.id}\" [$color label=\"${it.data}$pref, $chit, ${it.confidence}\"];")
+ }
+ transactions.values.forEach {
+ it.parents.forEach { p->
+ out.println("\"${it.id}\" -> \"$p\";")
+ }
+ }
+ out.println("}")
+ }
+ }
+}
diff --git a/experimental/avalanche/src/main/kotlin/net/corda/avalanche/Parameters.kt b/experimental/avalanche/src/main/kotlin/net/corda/avalanche/Parameters.kt
new file mode 100644
index 0000000000..8d13fcf799
--- /dev/null
+++ b/experimental/avalanche/src/main/kotlin/net/corda/avalanche/Parameters.kt
@@ -0,0 +1,38 @@
+package net.corda.avalanche
+
+import picocli.CommandLine
+
+class Parameters {
+ @CommandLine.Option(names = ["-n", "--num-transactions"], description = ["How many transactions to generate (default: 20)"])
+ var nrTransactions: Int = 20
+
+ @CommandLine.Option(names = ["-d", "--double-spend-ratio"], description = ["The double spend ratio (default: 0.02)"])
+ var doubleSpendRatio: Double = 0.02
+
+ @CommandLine.Option(names = ["-a", "--alpha"], description = ["The alpha parameter (default: 0.8)"])
+ var alpha = 0.8
+
+ @CommandLine.Option(names = ["--num-nodes"], description = ["The number of nodes (default: 50)"])
+ var nrNodes = 50
+
+ @CommandLine.Option(names = ["-k", "--sample-size"], description = ["The sample size (default `1 + nrNodes / 10`)"])
+ var k = 1 + nrNodes / 10
+
+ @CommandLine.Option(names = ["--beta1"], description = ["The beta1 parameter (default: 5)"])
+ var beta1 = 5
+
+ @CommandLine.Option(names = ["--beta2"], description = ["The beta2 parameter (default: 5)"])
+ var beta2 = 5
+
+ @CommandLine.Option(names = ["-h", "--help"], usageHelp = true, description = ["Display help and exit"])
+ var helpRequested = false
+
+ @CommandLine.Option(names = ["--seed"], description = ["The RNG seed (default: 23)"])
+ var seed = 23L
+
+ @CommandLine.Option(names = ["--dump-dags"], description = ["Dump DAGs in dot format (default: false)"])
+ var dumpDags = false
+
+ @CommandLine.Option(names = ["-v", "--verbose"], description=["Verbose mode (default: false)"])
+ var verbose = false
+}
diff --git a/node/src/integration-test/kotlin/net/corda/node/BootTests.kt b/node/src/integration-test/kotlin/net/corda/node/BootTests.kt
index 1842f0ee63..01595469f6 100644
--- a/node/src/integration-test/kotlin/net/corda/node/BootTests.kt
+++ b/node/src/integration-test/kotlin/net/corda/node/BootTests.kt
@@ -74,9 +74,7 @@ class BootTests : IntegrationTest() {
@Test
fun `double node start doesn't write into log file`() {
- val logConfigFile = projectRootDir / "config" / "dev" / "log4j2.xml"
- assertThat(logConfigFile).isRegularFile()
- driver(DriverParameters(isDebug = true, systemProperties = mapOf("log4j.configurationFile" to logConfigFile.toString()))) {
+ driver(DriverParameters(isDebug = true)) {
val alice = startNode(providedName = ALICE_NAME).get()
val logFolder = alice.baseDirectory / NodeStartup.LOGS_DIRECTORY_NAME
val logFile = logFolder.list { it.filter { it.fileName.toString().endsWith(".log") }.findAny().get() }
diff --git a/node/src/main/kotlin/net/corda/node/services/persistence/NodeAttachmentService.kt b/node/src/main/kotlin/net/corda/node/services/persistence/NodeAttachmentService.kt
index 0ad2814b31..b88782b9cc 100644
--- a/node/src/main/kotlin/net/corda/node/services/persistence/NodeAttachmentService.kt
+++ b/node/src/main/kotlin/net/corda/node/services/persistence/NodeAttachmentService.kt
@@ -122,7 +122,7 @@ class NodeAttachmentService(
@ElementCollection
@Column(name = "contract_class_name", nullable = false)
- @CollectionTable(name = "node_attchments_contracts", joinColumns = [(JoinColumn(name = "att_id", referencedColumnName = "att_id"))],
+ @CollectionTable(name = "${NODE_DATABASE_PREFIX}attachments_contracts", joinColumns = [(JoinColumn(name = "att_id", referencedColumnName = "att_id"))],
foreignKey = ForeignKey(name = "FK__ctr_class__attachments"))
var contractClassNames: List<String>? = null
) : Serializable
diff --git a/node/src/main/resources/migration/node-core.changelog-master.xml b/node/src/main/resources/migration/node-core.changelog-master.xml
index 2ef60795b7..2e039e8fae 100644
--- a/node/src/main/resources/migration/node-core.changelog-master.xml
+++ b/node/src/main/resources/migration/node-core.changelog-master.xml
@@ -16,5 +16,5 @@
-
+
diff --git a/node/src/main/resources/migration/node-core.changelog-v3-GA.xml b/node/src/main/resources/migration/node-core.changelog-v3-GA.xml
new file mode 100644
index 0000000000..204fdaec6e
--- /dev/null
+++ b/node/src/main/resources/migration/node-core.changelog-v3-GA.xml
@@ -0,0 +1,12 @@
+
+
+
+
+
+
+
diff --git a/samples/simm-valuation-demo/src/integration-test/kotlin/net/corda/vega/SimmValuationTest.kt b/samples/simm-valuation-demo/src/integration-test/kotlin/net/corda/vega/SimmValuationTest.kt
index 059dadb433..7d31052947 100644
--- a/samples/simm-valuation-demo/src/integration-test/kotlin/net/corda/vega/SimmValuationTest.kt
+++ b/samples/simm-valuation-demo/src/integration-test/kotlin/net/corda/vega/SimmValuationTest.kt
@@ -12,9 +12,11 @@ package net.corda.vega
import com.opengamma.strata.product.common.BuySell
import net.corda.core.identity.CordaX500Name
+import net.corda.core.internal.div
import net.corda.core.internal.packageName
import net.corda.core.utilities.getOrThrow
import net.corda.serialization.internal.amqp.AbstractAMQPSerializationScheme
+import net.corda.testing.common.internal.ProjectStructure.projectRootDir
import net.corda.testing.core.DUMMY_BANK_A_NAME
import net.corda.testing.core.DUMMY_BANK_B_NAME
import net.corda.testing.core.DUMMY_NOTARY_NAME
@@ -64,7 +66,13 @@ class SimmValuationTest : IntegrationTest() {
@Test
fun `runs SIMM valuation demo`() {
- driver(DriverParameters(isDebug = true, extraCordappPackagesToScan = listOf("net.corda.vega.contracts", "net.corda.vega.plugin.customserializers"))) {
+ val logConfigFile = projectRootDir / "samples" / "simm-valuation-demo" / "src" / "main" / "resources" / "log4j2.xml"
+ assertThat(logConfigFile).isRegularFile()
+ driver(DriverParameters(
+ isDebug = true,
+ extraCordappPackagesToScan = listOf("net.corda.vega.contracts", "net.corda.vega.plugin.customserializers"),
+ systemProperties = mapOf("log4j.configurationFile" to logConfigFile.toString()))
+ ) {
val nodeAFuture = startNode(providedName = nodeALegalName)
val nodeBFuture = startNode(providedName = nodeBLegalName)
val (nodeA, nodeB) = listOf(nodeAFuture, nodeBFuture).map { it.getOrThrow() }
diff --git a/settings.gradle b/settings.gradle
index bedcf5c4cb..e19af34b00 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -28,6 +28,7 @@ include 'client:rpc'
include 'webserver'
include 'webserver:webcapsule'
include 'experimental'
+include 'experimental:avalanche'
include 'experimental:behave'
include 'experimental:sandbox'
include 'experimental:quasar-hook'
diff --git a/testing/node-driver/src/main/kotlin/net/corda/testing/node/internal/ProcessUtilities.kt b/testing/node-driver/src/main/kotlin/net/corda/testing/node/internal/ProcessUtilities.kt
index 9998c9ef2f..fa97cd88cc 100644
--- a/testing/node-driver/src/main/kotlin/net/corda/testing/node/internal/ProcessUtilities.kt
+++ b/testing/node-driver/src/main/kotlin/net/corda/testing/node/internal/ProcessUtilities.kt
@@ -32,9 +32,7 @@ object ProcessUtilities {
workingDirectory: Path?,
maximumHeapSize: String
): Process {
- // FIXME: Instead of hacking our classpath, use the correct classpath for className.
- val classpath = defaultClassPath.split(pathSeparator).filter { !(it / "log4j2-test.xml").exists() }.joinToString(pathSeparator)
- return startJavaProcessImpl(className, arguments, classpath, jdwpPort, extraJvmArguments, workingDirectory, maximumHeapSize)
+ return startJavaProcessImpl(className, arguments, defaultClassPath, jdwpPort, extraJvmArguments, workingDirectory, maximumHeapSize)
}
fun startJavaProcessImpl(
diff --git a/testing/test-common/src/main/resources/log4j2-test.xml b/testing/test-common/src/main/resources/log4j2-test.xml
index 55e4799ff2..c8e3cda828 100644
--- a/testing/test-common/src/main/resources/log4j2-test.xml
+++ b/testing/test-common/src/main/resources/log4j2-test.xml
@@ -10,13 +10,20 @@
-->
+
- info
+ ${sys:log-path:-logs}
+ node-${hostName}
+ ${log-path}/archive
+ ${sys:defaultLogLevel:-info}
+
+
+
-
+
-
+
+
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
+
+
+
+
+
+
+
+
diff --git a/testing/test-utils/src/main/resources/database-scripts/azure-sql/db-global-cleanup.sql b/testing/test-utils/src/main/resources/database-scripts/azure-sql/db-global-cleanup.sql
index d6174089c0..7d770f34de 100644
--- a/testing/test-utils/src/main/resources/database-scripts/azure-sql/db-global-cleanup.sql
+++ b/testing/test-utils/src/main/resources/database-scripts/azure-sql/db-global-cleanup.sql
@@ -4,7 +4,7 @@ DROP TABLE IF EXISTS ${schema}.cp_states_v2_participants;
DROP TABLE IF EXISTS ${schema}.dummy_linear_state_parts;
DROP TABLE IF EXISTS ${schema}.dummy_linear_states_v2_parts;
DROP TABLE IF EXISTS ${schema}.dummy_deal_states_parts;
-DROP TABLE IF EXISTS ${schema}.node_attchments_contracts;
+DROP TABLE IF EXISTS ${schema}.node_attachments_contracts;
DROP TABLE IF EXISTS ${schema}.node_attachments;
DROP TABLE IF EXISTS ${schema}.node_checkpoints;
DROP TABLE IF EXISTS ${schema}.node_transactions;
diff --git a/testing/test-utils/src/main/resources/database-scripts/azure-sql/db-setup.sql b/testing/test-utils/src/main/resources/database-scripts/azure-sql/db-setup.sql
index 9604bc1854..dbbb9527ef 100644
--- a/testing/test-utils/src/main/resources/database-scripts/azure-sql/db-setup.sql
+++ b/testing/test-utils/src/main/resources/database-scripts/azure-sql/db-setup.sql
@@ -4,7 +4,7 @@ DROP TABLE IF EXISTS ${schema}.cp_states_v2_participants;
DROP TABLE IF EXISTS ${schema}.dummy_linear_state_parts;
DROP TABLE IF EXISTS ${schema}.dummy_linear_states_v2_parts;
DROP TABLE IF EXISTS ${schema}.dummy_deal_states_parts;
-DROP TABLE IF EXISTS ${schema}.node_attchments_contracts;
+DROP TABLE IF EXISTS ${schema}.node_attachments_contracts;
DROP TABLE IF EXISTS ${schema}.node_attachments;
DROP TABLE IF EXISTS ${schema}.node_checkpoints;
DROP TABLE IF EXISTS ${schema}.node_transactions;
diff --git a/testing/test-utils/src/main/resources/database-scripts/oracle/db-cleanup.sql b/testing/test-utils/src/main/resources/database-scripts/oracle/db-cleanup.sql
index dec3903578..97d27f2336 100644
--- a/testing/test-utils/src/main/resources/database-scripts/oracle/db-cleanup.sql
+++ b/testing/test-utils/src/main/resources/database-scripts/oracle/db-cleanup.sql
@@ -4,7 +4,7 @@ DROP TABLE ${schema}.cp_states_v2_participants CASCADE CONSTRAINTS
DROP TABLE ${schema}.dummy_linear_state_parts CASCADE CONSTRAINTS
DROP TABLE ${schema}.dummy_linear_states_v2_parts CASCADE CONSTRAINTS
DROP TABLE ${schema}.dummy_deal_states_parts CASCADE CONSTRAINTS
-DROP TABLE ${schema}.node_attchments_contracts CASCADE CONSTRAINTS
+DROP TABLE ${schema}.node_attachments_contracts CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_attachments CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_checkpoints CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_transactions CASCADE CONSTRAINTS
diff --git a/testing/test-utils/src/main/resources/database-scripts/oracle/db-setup.sql b/testing/test-utils/src/main/resources/database-scripts/oracle/db-setup.sql
index 3ecb1c17c8..ccece802e2 100644
--- a/testing/test-utils/src/main/resources/database-scripts/oracle/db-setup.sql
+++ b/testing/test-utils/src/main/resources/database-scripts/oracle/db-setup.sql
@@ -6,7 +6,7 @@ DROP TABLE ${schema}.dummy_linear_states_v2_parts CASCADE CONSTRAINTS
DROP TABLE ${schema}.dummy_deal_states_parts CASCADE CONSTRAINTS
DROP TABLE ${schema}.dummy_test_states_parts CASCADE CONSTRAINTS
DROP TABLE ${schema}.dummy_test_states CASCADE CONSTRAINTS
-DROP TABLE ${schema}.node_attchments_contracts CASCADE CONSTRAINTS
+DROP TABLE ${schema}.node_attachments_contracts CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_attachments CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_checkpoints CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_transactions CASCADE CONSTRAINTS
diff --git a/testing/test-utils/src/main/resources/database-scripts/sql-server/db-global-cleanup.sql b/testing/test-utils/src/main/resources/database-scripts/sql-server/db-global-cleanup.sql
index 7292a1e983..6dd0ef1510 100644
--- a/testing/test-utils/src/main/resources/database-scripts/sql-server/db-global-cleanup.sql
+++ b/testing/test-utils/src/main/resources/database-scripts/sql-server/db-global-cleanup.sql
@@ -4,7 +4,7 @@ DROP TABLE IF EXISTS ${schema}.cp_states_v2_participants;
DROP TABLE IF EXISTS ${schema}.dummy_linear_state_parts;
DROP TABLE IF EXISTS ${schema}.dummy_linear_states_v2_parts;
DROP TABLE IF EXISTS ${schema}.dummy_deal_states_parts;
-DROP TABLE IF EXISTS ${schema}.node_attchments_contracts;
+DROP TABLE IF EXISTS ${schema}.node_attachments_contracts;
DROP TABLE IF EXISTS ${schema}.node_attachments;
DROP TABLE IF EXISTS ${schema}.node_checkpoints;
DROP TABLE IF EXISTS ${schema}.node_transactions;
diff --git a/testing/test-utils/src/main/resources/database-scripts/sql-server/db-global-setup.sql b/testing/test-utils/src/main/resources/database-scripts/sql-server/db-global-setup.sql
index b6d26fd681..03fdaa0d8f 100644
--- a/testing/test-utils/src/main/resources/database-scripts/sql-server/db-global-setup.sql
+++ b/testing/test-utils/src/main/resources/database-scripts/sql-server/db-global-setup.sql
@@ -4,7 +4,7 @@ DROP TABLE IF EXISTS ${schema}.cp_states_v2_participants;
DROP TABLE IF EXISTS ${schema}.dummy_linear_state_parts;
DROP TABLE IF EXISTS ${schema}.dummy_linear_states_v2_parts;
DROP TABLE IF EXISTS ${schema}.dummy_deal_states_parts;
-DROP TABLE IF EXISTS ${schema}.node_attchments_contracts;
+DROP TABLE IF EXISTS ${schema}.node_attachments_contracts;
DROP TABLE IF EXISTS ${schema}.node_attachments;
DROP TABLE IF EXISTS ${schema}.node_checkpoints;
DROP TABLE IF EXISTS ${schema}.node_transactions;
diff --git a/testing/test-utils/src/main/resources/database-scripts/sql-server/db-setup.sql b/testing/test-utils/src/main/resources/database-scripts/sql-server/db-setup.sql
index 1ae9fb5dd8..02f97e118f 100644
--- a/testing/test-utils/src/main/resources/database-scripts/sql-server/db-setup.sql
+++ b/testing/test-utils/src/main/resources/database-scripts/sql-server/db-setup.sql
@@ -4,7 +4,7 @@ DROP TABLE IF EXISTS ${schema}.cp_states_v2_participants;
DROP TABLE IF EXISTS ${schema}.dummy_linear_state_parts;
DROP TABLE IF EXISTS ${schema}.dummy_linear_states_v2_parts;
DROP TABLE IF EXISTS ${schema}.dummy_deal_states_parts;
-DROP TABLE IF EXISTS ${schema}.node_attchments_contracts;
+DROP TABLE IF EXISTS ${schema}.node_attachments_contracts;
DROP TABLE IF EXISTS ${schema}.node_attachments;
DROP TABLE IF EXISTS ${schema}.node_checkpoints;
DROP TABLE IF EXISTS ${schema}.node_transactions;
diff --git a/tools/demobench/src/main/kotlin/net/corda/demobench/model/InstallFactory.kt b/tools/demobench/src/main/kotlin/net/corda/demobench/model/InstallFactory.kt
index 28276c2f41..3ea7c25306 100644
--- a/tools/demobench/src/main/kotlin/net/corda/demobench/model/InstallFactory.kt
+++ b/tools/demobench/src/main/kotlin/net/corda/demobench/model/InstallFactory.kt
@@ -14,6 +14,7 @@ import com.typesafe.config.Config
import net.corda.core.internal.deleteRecursively
import net.corda.core.internal.div
import net.corda.core.utilities.NetworkHostAndPort
+import net.corda.nodeapi.internal.config.UnknownConfigKeysPolicy
import net.corda.nodeapi.internal.config.parseAs
import tornadofx.*
import java.io.IOException
@@ -29,7 +30,7 @@ class InstallFactory : Controller() {
require(nodeController.isPortValid(port)) { "Invalid port $port" }
}
- val nodeConfig = config.parseAs<NodeConfig>()
+ val nodeConfig = config.parseAs<NodeConfig>(UnknownConfigKeysPolicy.IGNORE::handle)
nodeConfig.p2pAddress.checkPort()
nodeConfig.rpcAddress.checkPort()
nodeConfig.webAddress.checkPort()
diff --git a/tools/demobench/src/main/kotlin/net/corda/demobench/model/NodeConfig.kt b/tools/demobench/src/main/kotlin/net/corda/demobench/model/NodeConfig.kt
index 881499dd33..f66fbde2a4 100644
--- a/tools/demobench/src/main/kotlin/net/corda/demobench/model/NodeConfig.kt
+++ b/tools/demobench/src/main/kotlin/net/corda/demobench/model/NodeConfig.kt
@@ -43,7 +43,10 @@ data class NodeConfig(
val issuableCurrencies: List<String> = emptyList(),
/** Pass-through for generating node.conf with external DB */
val dataSourceProperties: Properties? = null,
- val database: Properties? = null
+ val database: Properties? = null,
+ private val devMode: Boolean = true,
+ private val detectPublicIp: Boolean = false,
+ private val useTestClock: Boolean = true
) {
companion object {
val renderOptions: ConfigRenderOptions = ConfigRenderOptions.defaults().setOriginComments(false)
@@ -51,14 +54,9 @@ data class NodeConfig(
const val cordappDirName = "cordapps"
}
- @Suppress("unused")
- private val detectPublicIp = false
- @Suppress("unused")
- private val useTestClock = true
-
fun nodeConf(): Config {
- val basic = NodeConfigurationData(myLegalName, p2pAddress, rpcAddress, notary, h2port, rpcUsers, useTestClock, detectPublicIp).toConfig()
+ val basic = NodeConfigurationData(myLegalName, p2pAddress, rpcAddress, notary, h2port, rpcUsers, useTestClock, detectPublicIp, devMode).toConfig()
val rpcSettings = empty()
.withValue("address", ConfigValueFactory.fromAnyRef(rpcAddress.toString()))
.withValue("adminAddress", ConfigValueFactory.fromAnyRef(rpcAdminAddress.toString()))
@@ -88,7 +86,8 @@ private data class NodeConfigurationData(
val h2port: Int,
val rpcUsers: List<User> = listOf(NodeConfig.defaultUser),
val useTestClock: Boolean,
- val detectPublicIp: Boolean
+ val detectPublicIp: Boolean,
+ val devMode: Boolean
)
private data class WebServerConfigurationData(
diff --git a/tools/demobench/src/main/kotlin/net/corda/demobench/views/NodeTerminalView.kt b/tools/demobench/src/main/kotlin/net/corda/demobench/views/NodeTerminalView.kt
index 4a580cb1b3..9ccfd6d659 100644
--- a/tools/demobench/src/main/kotlin/net/corda/demobench/views/NodeTerminalView.kt
+++ b/tools/demobench/src/main/kotlin/net/corda/demobench/views/NodeTerminalView.kt
@@ -23,6 +23,7 @@ import javafx.scene.layout.HBox
import javafx.scene.layout.StackPane
import javafx.scene.layout.VBox
import javafx.util.Duration
+import net.corda.client.rpc.RPCException
import net.corda.core.concurrent.match
import net.corda.core.contracts.ContractState
import net.corda.core.messaging.CordaRPCOps
@@ -211,13 +212,25 @@ class NodeTerminalView : Fragment() {
}
val fxScheduler = Schedulers.from(Platform::runLater)
- subscriptions.add(txNext.observeOn(fxScheduler).subscribe {
+ subscriptions.add(txNext.observeOn(fxScheduler).subscribe({
transactions.value = (++txCount).toString()
- })
- subscriptions.add(stateNext.observeOn(fxScheduler).subscribe {
+ }, { error ->
+ if (error is RPCException && error.message?.contains("Connection failure detected") == true) {
+ // Ignore this ^^^, it only happens when we shut down a node in Demobench.
+ } else {
+ throw error
+ }
+ }))
+ subscriptions.add(stateNext.observeOn(fxScheduler).subscribe({
stateCount += (it.produced.size - it.consumed.size)
states.value = stateCount.toString()
- })
+ }, { error ->
+ if (error is RPCException && error.message?.contains("Connection failure detected") == true) {
+ // Ignore this ^^^, it only happens when we shut down a node in Demobench.
+ } else {
+ throw error
+ }
+ }))
} catch (e: Exception) {
log.log(Level.WARNING, "RPC failed: ${e.message}", e)
}