Merge pull request #919 from corda/merges/os-2018-06-04-szymon

OS -> ENT merge on 2018-06-04
This commit is contained in:
szymonsztuka 2018-06-04 22:16:26 +01:00 committed by GitHub
commit 993737aecc
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
28 changed files with 507 additions and 76 deletions

.idea/compiler.xml (generated)

@@ -7,6 +7,8 @@
<module name="attachment-demo_integrationTest" target="1.8" />
<module name="attachment-demo_main" target="1.8" />
<module name="attachment-demo_test" target="1.8" />
<module name="avalanche_main" target="1.8" />
<module name="avalanche_test" target="1.8" />
<module name="bank-of-corda-demo_integrationTest" target="1.8" />
<module name="bank-of-corda-demo_main" target="1.8" />
<module name="bank-of-corda-demo_test" target="1.8" />


@@ -12,11 +12,10 @@
<Configuration status="info">
<Properties>
<Property name="log-path">logs</Property>
<Property name="log-path">${sys:log-path:-logs}</Property>
<Property name="log-name">node-${hostName}</Property>
<Property name="archive">${sys:log-path}/archive</Property>
<Property name="consoleLogLevel">error</Property>
<Property name="defaultLogLevel">info</Property>
<Property name="archive">${log-path}/archive</Property>
<Property name="defaultLogLevel">${sys:defaultLogLevel:-info}</Property>
</Properties>
<ThresholdFilter level="trace"/>
@@ -24,7 +23,7 @@
<Appenders>
<Console name="Console-Appender" target="SYSTEM_OUT">
<PatternLayout>
<ScriptPatternSelector defaultPattern="%highlight{%level{length=1} %date{HH:mm:ssZ} [%t] %c{2}.%method - %msg%n}{INFO=white,WARN=red,FATAL=bright red}">
<ScriptPatternSelector defaultPattern="%highlight{[%level{length=5}] %date{HH:mm:ssZ} [%t] %c{2}.%method - %msg%n}{INFO=white,WARN=red,FATAL=bright red}">
<Script name="MDCSelector" language="javascript"><![CDATA[
result = null;
if (!logEvent.getContextData().size() == 0) {
@@ -35,7 +34,7 @@
result;
]]>
</Script>
<PatternMatch key="WithMDC" pattern="%highlight{%level{length=1} %date{HH:mm:ssZ} [%t] %c{2}.%method - %msg %X%n}{INFO=white,WARN=red,FATAL=bright red}"/>
<PatternMatch key="WithMDC" pattern="%highlight{[%level{length=5}] %date{HH:mm:ssZ} [%t] %c{2}.%method - %msg %X%n}{INFO=white,WARN=red,FATAL=bright red}"/>
</ScriptPatternSelector>
</PatternLayout>
<ThresholdFilter level="trace"/>
@@ -49,7 +48,7 @@
<!-- Will generate up to 100 log files for a given day. During every rollover it will delete
those that are older than 60 days, but keep the most recent 10 GB -->
<RollingFile name="RollingFile-Appender"
fileName="${sys:log-path}/${log-name}.log"
fileName="${log-path}/${log-name}.log"
filePattern="${archive}/${log-name}.%date{yyyy-MM-dd}-%i.log.gz">
<PatternLayout pattern="[%-5level] %date{ISO8601}{UTC}Z [%t] %c{2}.%method - %msg %X%n"/>
@@ -74,15 +73,23 @@
</Appenders>
<Loggers>
<Root level="${sys:defaultLogLevel}">
<AppenderRef ref="Console-Appender" level="${sys:consoleLogLevel}"/>
<AppenderRef ref="RollingFile-Appender" />
<Root level="info">
<AppenderRef ref="Console-Appender"/>
</Root>
<Logger name="net.corda" level="${defaultLogLevel}" additivity="false">
<AppenderRef ref="Console-Appender"/>
<AppenderRef ref="RollingFile-Appender" />
</Logger>
<Logger name="BasicInfo" additivity="false">
<AppenderRef ref="Console-Appender-Println"/>
<AppenderRef ref="RollingFile-Appender" />
</Logger>
<Logger name="org.hibernate.SQL" level="info" additivity="false">
<AppenderRef ref="Console-Appender"/>
<AppenderRef ref="RollingFile-Appender"/>
</Logger>
<Logger name="org.apache.activemq.artemis.core.server" level="error" additivity="false">
<AppenderRef ref="Console-Appender"/>
<AppenderRef ref="RollingFile-Appender"/>
</Logger>
<Logger name="org.jolokia" additivity="true" level="warn">


@@ -14,8 +14,12 @@ To run simply pass in the file or URL as the first parameter:
Use the ``--help`` flag for a full list of command line options.
``SerializedBytes``
~~~~~~~~~~~~~~~~~~
When inspecting your custom data structures, there's no need to include the jars containing the class definitions for them
in the classpath. The blob inspector (or rather the serialization framework) is able to synthesise any classes found in the
blob that aren't on the classpath.
SerializedBytes
~~~~~~~~~~~~~~~
One thing to note is that the binary blob may contain embedded ``SerializedBytes`` objects. Rather than printing these
out as a Base64 string, the blob inspector will first materialise them into Java objects and then output those. You will
@@ -23,41 +27,41 @@ see this when dealing with classes such as ``SignedData`` or other structures th
``nodeInfo-*`` files or the ``network-parameters`` file in the node's directory. For example, the output of a node-info
file may look like:
.. container:: codeset
.. sourcecode:: yaml
**-\\-format=YAML**
::
net.corda.nodeapi.internal.SignedNodeInfo
---
raw:
  class: "net.corda.core.node.NodeInfo"
  deserialized:
    addresses:
    - "localhost:10011"
    legalIdentitiesAndCerts:
    - "O=BankOfCorda, L=New York, C=US"
    platformVersion: 4
    serial: 1527074180971
signatures:
- !!binary |
  dmoAnnzcv0MzRN+3ZSCDcCJIAbXnoYy5mFWB3Nijndzu/dzIoYdIawINXbNSY/5z2XloDK01vZRV
  TreFZCbZAg==
net.corda.nodeapi.internal.SignedNodeInfo
---
raw:
  class: "net.corda.core.node.NodeInfo"
  deserialized:
    addresses:
    - "localhost:10005"
    legalIdentitiesAndCerts:
    - "O=BankOfCorda, L=London, C=GB"
    platformVersion: 4
    serial: 1527851068715
signatures:
- !!binary |-
  VFRy4frbgRDbCpK1Vo88PyUoj01vbRnMR3ROR2abTFk7yJ14901aeScX/CiEP+CDGiMRsdw01cXt
  hKSobAY7Dw==
.. sourcecode:: json
**-\\-format=JSON**
::
net.corda.nodeapi.internal.SignedNodeInfo
{
  "raw" : {
    "class" : "net.corda.core.node.NodeInfo",
    "deserialized" : {
      "addresses" : [ "localhost:10011" ],
      "legalIdentitiesAndCerts" : [ "O=BankOfCorda, L=New York, C=US" ],
      "platformVersion" : 4,
      "serial" : 1527074180971
    }
  },
  "signatures" : [ "dmoAnnzcv0MzRN+3ZSCDcCJIAbXnoYy5mFWB3Nijndzu/dzIoYdIawINXbNSY/5z2XloDK01vZRVTreFZCbZAg==" ]
}
net.corda.nodeapi.internal.SignedNodeInfo
{
  "raw" : {
    "class" : "net.corda.core.node.NodeInfo",
    "deserialized" : {
      "addresses" : [ "localhost:10005" ],
      "legalIdentitiesAndCerts" : [ "O=BankOfCorda, L=London, C=GB" ],
      "platformVersion" : 4,
      "serial" : 1527851068715
    }
  },
  "signatures" : [ "VFRy4frbgRDbCpK1Vo88PyUoj01vbRnMR3ROR2abTFk7yJ14901aeScX/CiEP+CDGiMRsdw01cXthKSobAY7Dw==" ]
}
Notice the file is actually a serialised ``SignedNodeInfo`` object, which has a ``raw`` property of type ``SerializedBytes<NodeInfo>``.
This property is materialised into a ``NodeInfo`` and is output under the ``deserialized`` field.
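As a rough illustration of what the blob inspector does here, the sketch below (hypothetical, and assuming a Corda AMQP
serialization environment has already been initialised by the surrounding tool or test harness) deserialises the outer
``SignedNodeInfo`` envelope and then materialises its ``raw`` property:

.. sourcecode:: kotlin

    import net.corda.core.node.NodeInfo
    import net.corda.core.serialization.deserialize
    import net.corda.nodeapi.internal.SignedNodeInfo
    import java.io.File

    fun inspectNodeInfo(path: String) {
        // Read the serialised blob from disk, e.g. a nodeInfo-* file.
        val bytes = File(path).readBytes()
        // Deserialise the outer envelope...
        val signed = bytes.deserialize<SignedNodeInfo>()
        // ...then materialise the embedded SerializedBytes<NodeInfo>.
        val nodeInfo: NodeInfo = signed.raw.deserialize()
        println(nodeInfo.addresses)
    }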


@@ -8,6 +8,8 @@ Unreleased
==========
* Introduced a hierarchy of ``DatabaseMigrationException``s, allowing ``NodeStartup`` to gracefully inform users of problems related to database migrations before exiting with a non-zero code.
* Fixed an issue preventing out of process nodes started by the ``Driver`` from logging to file.
* Fixed an issue with ``CashException`` not being able to deserialise after the introduction of AMQP for RPC.
* Removed the ``-Xmx`` VM argument from Explorer's Capsule setup. This helps avoid out-of-memory errors.
@@ -120,6 +122,8 @@ Unreleased
For instance, this method will check if an ECC key lies on a valid curve or if an RSA key is >= 2048 bits. This might
be required for extra key validation checks, e.g., for Doorman to check that a CSR key meets the minimum security requirements.
* Table name with a typo changed from ``NODE_ATTCHMENTS_CONTRACTS`` to ``NODE_ATTACHMENTS_CONTRACTS``.
.. _changelog_v3.1:
Version 3.1


@@ -68,6 +68,12 @@ UNRELEASED
No action is needed for default node tables as ``PersistentStateRef`` is used as Primary Key only and the backing columns are automatically not nullable
or custom Cordapp entities using ``PersistentStateRef`` as Primary Key.
* H2 database upgrade - the table name with a typo has been changed. For each database instance and schema, run the following SQL statement::

     ALTER TABLE [schema].NODE_ATTCHMENTS_CONTRACTS RENAME TO NODE_ATTACHMENTS_CONTRACTS;

  The schema prefix is optional. Run the SQL while the node is not running.
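A minimal way to apply this from code, assuming a standard dev-mode H2 node database (the JDBC URL, user and empty
password below are assumptions; adjust them to your deployment, and make sure the node is stopped first):

.. sourcecode:: kotlin

    import java.sql.DriverManager

    fun renameAttachmentsTable() {
        // Hypothetical path to the node's H2 store; the node must not be running.
        val url = "jdbc:h2:file:/path/to/node/persistence"
        DriverManager.getConnection(url, "sa", "").use { connection ->
            connection.createStatement().use { statement ->
                statement.execute("ALTER TABLE NODE_ATTCHMENTS_CONTRACTS RENAME TO NODE_ATTACHMENTS_CONTRACTS")
            }
        }
    }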
Upgrading to Corda Enterprise 3.0 Developer Preview
---------------------------------------------------
A prerequisite to upgrade to Corda Enterprise 3.0 is to ensure your CorDapp is upgraded to Open Source Corda V3.x.


@@ -0,0 +1,24 @@
# Avalanche Simulation
Experimental simulation of the Avalanche protocol by Team Rocket. This
implementation is incomplete.
The paper: [Snowflake to Avalanche: A Novel Metastable Consensus Protocol Family for
Cryptocurrencies](https://ipfs.io/ipfs/QmUy4jh5mGNZvLkjies1RWM4YuvJh5o2FYopNPVYwrRVGV).
## Running the Simulation
```
./gradlew shadowJar
java -jar build/libs/avalanche-all.jar --dump-dags
```
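The remaining knobs (number of transactions, double-spend ratio, alpha, beta values, sample size, seed) are defined in
`Parameters.kt` and can be listed with:
```
java -jar build/libs/avalanche-all.jar --help
```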
### Visualising the DAGs
```
for f in node-0-*.dot; do dot -Tpng -O $f; done
```
The above command generates a number of PNG files `node-0-*.png`, showing the
evolution of the DAG. The nodes are labelled with the ID of the spent state,
the chit and confidence values. The preferred transaction of a conflict set is
labelled with a star. Accepted transactions are blue.
![DAG](./images/node-0-003.dot.png)


@@ -0,0 +1,42 @@
buildscript {
ext.kotlin_version = '1.2.40'
repositories {
mavenCentral()
jcenter()
}
dependencies {
classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.3'
}
}
plugins {
id "org.jetbrains.kotlin.jvm"
id 'com.github.johnrengelman.shadow' version '2.0.3'
id 'java'
id 'application'
}
repositories {
mavenCentral()
}
dependencies {
compile "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlin_version"
compile group: 'info.picocli', name: 'picocli', version: '3.0.1'
testCompile group: 'junit', name: 'junit', version: '4.12'
}
compileKotlin {
kotlinOptions {
jvmTarget = "1.8"
}
}
compileTestKotlin {
kotlinOptions {
jvmTarget = "1.8"
}
}
mainClassName = "net.corda.avalanche.MainKt"
shadowJar {
baseName = "avalanche"
}

Binary file not shown (new image, 23 KiB)


@@ -0,0 +1,231 @@
package net.corda.avalanche
import picocli.CommandLine
import java.io.File
import java.util.*
import kotlin.collections.LinkedHashMap
fun main(args: Array<String>) {
val parameters = Parameters()
CommandLine(parameters).parse(*args)
if (parameters.helpRequested) {
CommandLine.usage(Parameters(), System.out)
return
}
val network = Network(parameters)
val n1 = network.nodes[0]
val c1 = mutableListOf<Transaction>()
val c2 = mutableListOf<Transaction>()
repeat(parameters.nrTransactions) {
val n = network.nodes.shuffled(network.rng).first()
c1.add(n.onGenerateTx(it))
if (it > 0 && network.rng.nextDouble() < parameters.doubleSpendRatio) { // it > 0: Random.nextInt requires a positive bound
val d = network.rng.nextInt(it)
println("double spend of $d")
val n2 = network.nodes.shuffled(network.rng).first()
c2.add(n2.onGenerateTx(d))
}
network.run()
if (parameters.dumpDags) {
n1.dumpDag(File("node-0-${String.format("%03d", it)}.dot"))
}
println("$it: " + String.format("%.3f", fractionAccepted(n1)))
}
val conflictSets = (c1 + c2).groupBy { it.data }.filterValues { it.size > 1 }
conflictSets.forEach { v, txs ->
val acceptance = txs.map { t -> network.nodes.map { it.isAccepted(t) }.any { it } }
require(acceptance.filter { it }.size < 2) { "More than one transaction of the conflict set of $v got accepted." }
}
}
fun fractionAccepted(n: Node): Double {
val accepted = n.transactions.values.filter { n.isAccepted(it) }.size
return accepted.toDouble() / n.transactions.size
}
data class Transaction(
val id: UUID,
val data: Int,
val parents: List<UUID>,
var chit: Int = 0,
var confidence: Int = 0) {
override fun toString(): String {
return "T(id=${id.toString().take(5)}, data=$data, parents=[${parents.map {it.toString().take(5) }}, chit=$chit, confidence=$confidence)"
}
}
data class ConflictSet(
var pref: Transaction,
var last: Transaction,
var count: Int,
var size: Int
)
class Network(val parameters: Parameters) {
val rng = Random(parameters.seed)
val tx = Transaction(UUID.randomUUID(), -1, emptyList(), 1)
val nodes = (0 until parameters.nrNodes).map { Node(it, parameters, tx.copy(), this, rng) }
fun run() {
nodes.forEach { it.avalancheLoop() }
}
}
class Node(val id: Int, parameters: Parameters, val genesisTx: Transaction, val network: Network, val rng: Random) {
val alpha = parameters.alpha
val k = parameters.k
val beta1 = parameters.beta1
val beta2 = parameters.beta2
val transactions = LinkedHashMap<UUID, Transaction>(mapOf(genesisTx.id to genesisTx))
val queried = mutableSetOf<UUID>(genesisTx.id)
val conflicts = mutableMapOf<Int, ConflictSet>(genesisTx.data to ConflictSet(genesisTx, genesisTx, 0, 1))
val accepted = mutableSetOf<UUID>(genesisTx.id)
val parentSets = mutableMapOf<UUID, Set<Transaction>>()
fun onGenerateTx(data: Int): Transaction {
val edges = parentSelection()
val t = Transaction(UUID.randomUUID(), data, edges.map { it.id })
onReceiveTx(this, t)
return t
}
fun onReceiveTx(sender: Node, tx: Transaction) {
if (transactions.contains(tx.id)) return
tx.chit = 0
tx.confidence = 0
tx.parents.forEach {
if (!transactions.contains(it)) {
val t = sender.onSendTx(it)
onReceiveTx(sender, t)
}
}
if (!conflicts.contains(tx.data)) {
conflicts[tx.data] = ConflictSet(tx, tx, 0, 1)
} else {
conflicts[tx.data]!!.size++
}
transactions[tx.id] = tx
}
fun onSendTx(id: UUID): Transaction {
return transactions[id]!!.copy()
}
fun onQuery(sender: Node, tx: Transaction): Int {
onReceiveTx(sender, tx)
return if (isStronglyPreferred(tx)) 1
else 0
}
fun avalancheLoop() {
val txs = transactions.values.filterNot { queried.contains(it.id) }
txs.forEach { tx ->
val sample = network.nodes.filterNot { it == this }.shuffled(rng).take(k)
val res = sample.map {
val txCopy = tx.copy()
it.onQuery(this, txCopy)
}.sum()
if (res >= alpha * k) {
tx.chit = 1
// Update the preference for ancestors.
parentSet(tx).forEach { p ->
p.confidence += 1
}
parentSet(tx).forEach { p->
val cs = conflicts[p.data]!!
if (p.confidence > cs.pref.confidence) {
cs.pref = p
}
if (p != cs.last) {
cs.last = p
cs.count = 0
} else {
cs.count++
}
}
}
queried.add(tx.id)
}
}
fun isPreferred(tx: Transaction): Boolean {
return conflicts[tx.data]!!.pref == tx
}
fun isStronglyPreferred(tx: Transaction): Boolean {
return parentSet(tx).map { isPreferred(it) }.all { it }
}
fun isAccepted(tx: Transaction): Boolean {
if (accepted.contains(tx.id)) return true
if (!queried.contains(tx.id)) return false
val cs = conflicts[tx.data]!!
val parentsAccepted = tx.parents.map { accepted.contains(it) }.all { it }
val isAccepted = (parentsAccepted && cs.size == 1 && tx.confidence > beta1) ||
(cs.pref == tx && cs.count > beta2)
if (isAccepted) accepted.add(tx.id)
return isAccepted
}
fun parentSet(tx: Transaction): Set<Transaction> {
if (parentSets.contains(tx.id)) return parentSets[tx.id]!!
val parents = mutableSetOf<Transaction>()
var ps = tx.parents.toSet()
while (ps.isNotEmpty()) {
ps.forEach {
if (transactions.contains(it)) parents.add(transactions[it]!!)
}
ps = ps.flatMap {
if (transactions.contains(it)) {
transactions[it]!!.parents
} else {
emptyList()
}
}.toSet()
}
parentSets[tx.id] = parents
return parents
}
fun parentSelection(): List<Transaction> {
val eps0 = transactions.values.filter { isStronglyPreferred(it) }
val eps1 = eps0.filter { conflicts[it.data]!!.size == 1 || it.confidence > 0 }
val parents = eps1.flatMap { parentSet(it) }.toSet().filterNot { eps1.contains(it) }
val fallback = if (transactions.size == 1) listOf(genesisTx)
else transactions.values.reversed().take(10).filter { !isAccepted(it) && conflicts[it.data]!!.size == 1 }.shuffled(network.rng).take(3)
require(parents.isNotEmpty() || fallback.isNotEmpty()) { "Unable to select parents." }
return if (parents.isEmpty()) fallback else parents
}
fun dumpDag(f: File) {
f.printWriter().use { out ->
out.println("digraph G {")
transactions.values.forEach {
val color = if (isAccepted(it)) "color=lightblue; style=filled;" else ""
val pref = if (conflicts[it.data]!!.size > 1 && isPreferred(it)) "*" else ""
val chit = if (queried.contains(it.id)) it.chit.toString() else "?"
out.println("\"${it.id}\" [$color label=\"${it.data}$pref, $chit, ${it.confidence}\"];")
}
transactions.values.forEach {
it.parents.forEach { p->
out.println("\"${it.id}\" -> \"$p\";")
}
}
out.println("}")
}
}
}


@@ -0,0 +1,38 @@
package net.corda.avalanche
import picocli.CommandLine
class Parameters {
@CommandLine.Option(names = ["-n", "--num-transactions"], description = ["How many transactions to generate (default: 20)"])
var nrTransactions: Int = 20
@CommandLine.Option(names = ["-d", "--double-spend-ratio"], description = ["The double spend ratio (default: 0.02)"])
var doubleSpendRatio: Double = 0.02
@CommandLine.Option(names = ["-a", "--alpha"], description = ["The alpha parameter (default: 0.8)"])
var alpha = 0.8
@CommandLine.Option(names = ["--num-nodes"], description = ["The number of nodes (default: 50)"])
var nrNodes = 50
@CommandLine.Option(names = ["-k", "--sample-size"], description = ["The sample size (default `1 + nrNodes / 10`)"])
var k = 1 + nrNodes / 10
@CommandLine.Option(names = ["--beta1"], description = ["The beta1 parameter (default: 5)"])
var beta1 = 5
@CommandLine.Option(names = ["--beta2"], description = ["The beta1 parameter (default: 5)"])
var beta2 = 5
@CommandLine.Option(names = ["-h", "--help"], usageHelp = true, description = ["Display help and exit"])
var helpRequested = false
@CommandLine.Option(names = ["--seed"], description = ["The RNG seed (default: 23)"])
var seed = 23L
@CommandLine.Option(names = ["--dump-dags"], description = ["Dump DAGs in dot format (default: false)"])
var dumpDags = false
@CommandLine.Option(names = ["-v", "--verbose"], description=["Verbose mode (default: false)"])
var verbose = false
}


@@ -74,9 +74,7 @@ class BootTests : IntegrationTest() {
@Test
fun `double node start doesn't write into log file`() {
val logConfigFile = projectRootDir / "config" / "dev" / "log4j2.xml"
assertThat(logConfigFile).isRegularFile()
driver(DriverParameters(isDebug = true, systemProperties = mapOf("log4j.configurationFile" to logConfigFile.toString()))) {
driver(DriverParameters(isDebug = true)) {
val alice = startNode(providedName = ALICE_NAME).get()
val logFolder = alice.baseDirectory / NodeStartup.LOGS_DIRECTORY_NAME
val logFile = logFolder.list { it.filter { it.fileName.toString().endsWith(".log") }.findAny().get() }


@@ -122,7 +122,7 @@ class NodeAttachmentService(
@ElementCollection
@Column(name = "contract_class_name", nullable = false)
@CollectionTable(name = "node_attchments_contracts", joinColumns = [(JoinColumn(name = "att_id", referencedColumnName = "att_id"))],
@CollectionTable(name = "${NODE_DATABASE_PREFIX}attachments_contracts", joinColumns = [(JoinColumn(name = "att_id", referencedColumnName = "att_id"))],
foreignKey = ForeignKey(name = "FK__ctr_class__attachments"))
var contractClassNames: List<ContractClassName>? = null
) : Serializable


@@ -16,5 +16,5 @@
<include file="migration/node-core.changelog-init.xml"/>
<include file="migration/node-core.changelog-v3.xml"/>
<include file="migration/node-core.changelog-v3-GA.xml"/>
</databaseChangeLog>


@@ -0,0 +1,12 @@
<?xml version="1.1" encoding="UTF-8" standalone="no"?>
<databaseChangeLog xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
xmlns:ext="http://www.liquibase.org/xml/ns/dbchangelog-ext"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog-ext http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-ext.xsd http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.5.xsd"
logicalFilePath="migration/node-services.changelog-init.xml">
<changeSet author="R3.Corda" id="rename_contract_attachment">
<renameTable newTableName="node_attachments_contracts"
oldTableName="node_attchments_contracts"/>
</changeSet>
</databaseChangeLog>


@@ -12,9 +12,11 @@ package net.corda.vega
import com.opengamma.strata.product.common.BuySell
import net.corda.core.identity.CordaX500Name
import net.corda.core.internal.div
import net.corda.core.internal.packageName
import net.corda.core.utilities.getOrThrow
import net.corda.serialization.internal.amqp.AbstractAMQPSerializationScheme
import net.corda.testing.common.internal.ProjectStructure.projectRootDir
import net.corda.testing.core.DUMMY_BANK_A_NAME
import net.corda.testing.core.DUMMY_BANK_B_NAME
import net.corda.testing.core.DUMMY_NOTARY_NAME
@@ -64,7 +66,13 @@ class SimmValuationTest : IntegrationTest() {
@Test
fun `runs SIMM valuation demo`() {
driver(DriverParameters(isDebug = true, extraCordappPackagesToScan = listOf("net.corda.vega.contracts", "net.corda.vega.plugin.customserializers"))) {
val logConfigFile = projectRootDir / "samples" / "simm-valuation-demo" / "src" / "main" / "resources" / "log4j2.xml"
assertThat(logConfigFile).isRegularFile()
driver(DriverParameters(
isDebug = true,
extraCordappPackagesToScan = listOf("net.corda.vega.contracts", "net.corda.vega.plugin.customserializers"),
systemProperties = mapOf("log4j.configurationFile" to logConfigFile.toString()))
) {
val nodeAFuture = startNode(providedName = nodeALegalName)
val nodeBFuture = startNode(providedName = nodeBLegalName)
val (nodeA, nodeB) = listOf(nodeAFuture, nodeBFuture).map { it.getOrThrow() }


@@ -28,6 +28,7 @@ include 'client:rpc'
include 'webserver'
include 'webserver:webcapsule'
include 'experimental'
include 'experimental:avalanche'
include 'experimental:behave'
include 'experimental:sandbox'
include 'experimental:quasar-hook'


@@ -32,9 +32,7 @@ object ProcessUtilities {
workingDirectory: Path?,
maximumHeapSize: String
): Process {
// FIXME: Instead of hacking our classpath, use the correct classpath for className.
val classpath = defaultClassPath.split(pathSeparator).filter { !(it / "log4j2-test.xml").exists() }.joinToString(pathSeparator)
return startJavaProcessImpl(className, arguments, classpath, jdwpPort, extraJvmArguments, workingDirectory, maximumHeapSize)
return startJavaProcessImpl(className, arguments, defaultClassPath, jdwpPort, extraJvmArguments, workingDirectory, maximumHeapSize)
}
fun startJavaProcessImpl(


@@ -10,13 +10,20 @@
-->
<Configuration status="info">
<Properties>
<Property name="defaultLogLevel">info</Property>
<Property name="log-path">${sys:log-path:-logs}</Property>
<Property name="log-name">node-${hostName}</Property>
<Property name="archive">${log-path}/archive</Property>
<Property name="defaultLogLevel">${sys:defaultLogLevel:-info}</Property>
</Properties>
<ThresholdFilter level="trace"/>
<Appenders>
<Console name="Console-Appender" target="SYSTEM_OUT">
<PatternLayout>
<ScriptPatternSelector defaultPattern="[%-5level] %date{HH:mm:ss,SSS} [%t] (%F:%L) %c{2}.%method - %msg%n">
<ScriptPatternSelector defaultPattern="%highlight{[%level{length=5}] %date{HH:mm:ss,SSS} [%t] %c{2}.%method - %msg%n}{INFO=white,WARN=red,FATAL=bright red}">
<Script name="MDCSelector" language="javascript"><![CDATA[
result = null;
if (!logEvent.getContextData().size() == 0) {
@@ -27,31 +34,67 @@
result;
]]>
</Script>
<PatternMatch key="WithMDC" pattern="[%-5level] %date{HH:mm:ss,SSS} [%t] (%F:%L) %c{2}.%method - %msg %X%n"/>
<PatternMatch key="WithMDC" pattern="%highlight{[%level{length=5}] %date{HH:mm:ss,SSS} [%t] %c{2}.%method - %msg %X%n}{INFO=white,WARN=red,FATAL=bright red}"/>
</ScriptPatternSelector>
</PatternLayout>
<ThresholdFilter level="trace"/>
</Console>
<!-- Required for printBasicInfo -->
<Console name="Console-Appender-Println" target="SYSTEM_OUT">
<PatternLayout pattern="%msg%n"/>
<PatternLayout pattern="%msg%n" />
</Console>
<!-- Will generate up to 100 log files for a given day. During every rollover it will delete
those that are older than 60 days, but keep the most recent 10 GB -->
<RollingFile name="RollingFile-Appender"
fileName="${log-path}/${log-name}.log"
filePattern="${archive}/${log-name}.%date{yyyy-MM-dd}-%i.log.gz">
<PatternLayout pattern="[%-5level] %date{ISO8601}{UTC}Z [%t] %c{2}.%method - %msg %X%n"/>
<Policies>
<TimeBasedTriggeringPolicy/>
<SizeBasedTriggeringPolicy size="10MB"/>
</Policies>
<DefaultRolloverStrategy min="1" max="100">
<Delete basePath="${archive}" maxDepth="1">
<IfFileName glob="${log-name}*.log.gz"/>
<IfLastModified age="60d">
<IfAny>
<IfAccumulatedFileSize exceeds="10 GB"/>
</IfAny>
</IfLastModified>
</Delete>
</DefaultRolloverStrategy>
</RollingFile>
</Appenders>
<Loggers>
<Root level="info">
<AppenderRef ref="Console-Appender"/>
</Root>
<Logger name="net.corda" level="${sys:defaultLogLevel}" additivity="false">
<Logger name="net.corda" level="${defaultLogLevel}" additivity="false">
<AppenderRef ref="Console-Appender"/>
<AppenderRef ref="RollingFile-Appender" />
</Logger>
<Logger name="BasicInfo" additivity="false">
<AppenderRef ref="Console-Appender-Println"/>
<AppenderRef ref="RollingFile-Appender" />
</Logger>
<Logger name="org.hibernate.SQL" level="info" additivity="false">
<AppenderRef ref="Console-Appender"/>
<AppenderRef ref="RollingFile-Appender"/>
</Logger>
<Logger name="org.apache.activemq.artemis.core.server" level="error" additivity="false">
<AppenderRef ref="Console-Appender"/>
<AppenderRef ref="RollingFile-Appender"/>
</Logger>
<Logger name="org.jolokia" additivity="true" level="warn">
<AppenderRef ref="Console-Appender-Println"/>
<AppenderRef ref="RollingFile-Appender" />
</Logger>
</Loggers>
</Configuration>


@@ -4,7 +4,7 @@ DROP TABLE IF EXISTS ${schema}.cp_states_v2_participants;
DROP TABLE IF EXISTS ${schema}.dummy_linear_state_parts;
DROP TABLE IF EXISTS ${schema}.dummy_linear_states_v2_parts;
DROP TABLE IF EXISTS ${schema}.dummy_deal_states_parts;
DROP TABLE IF EXISTS ${schema}.node_attchments_contracts;
DROP TABLE IF EXISTS ${schema}.node_attachments_contracts;
DROP TABLE IF EXISTS ${schema}.node_attachments;
DROP TABLE IF EXISTS ${schema}.node_checkpoints;
DROP TABLE IF EXISTS ${schema}.node_transactions;


@@ -4,7 +4,7 @@ DROP TABLE IF EXISTS ${schema}.cp_states_v2_participants;
DROP TABLE IF EXISTS ${schema}.dummy_linear_state_parts;
DROP TABLE IF EXISTS ${schema}.dummy_linear_states_v2_parts;
DROP TABLE IF EXISTS ${schema}.dummy_deal_states_parts;
DROP TABLE IF EXISTS ${schema}.node_attchments_contracts;
DROP TABLE IF EXISTS ${schema}.node_attachments_contracts;
DROP TABLE IF EXISTS ${schema}.node_attachments;
DROP TABLE IF EXISTS ${schema}.node_checkpoints;
DROP TABLE IF EXISTS ${schema}.node_transactions;


@@ -4,7 +4,7 @@ DROP TABLE ${schema}.cp_states_v2_participants CASCADE CONSTRAINTS
DROP TABLE ${schema}.dummy_linear_state_parts CASCADE CONSTRAINTS
DROP TABLE ${schema}.dummy_linear_states_v2_parts CASCADE CONSTRAINTS
DROP TABLE ${schema}.dummy_deal_states_parts CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_attchments_contracts CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_attachments_contracts CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_attachments CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_checkpoints CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_transactions CASCADE CONSTRAINTS


@@ -6,7 +6,7 @@ DROP TABLE ${schema}.dummy_linear_states_v2_parts CASCADE CONSTRAINTS
DROP TABLE ${schema}.dummy_deal_states_parts CASCADE CONSTRAINTS
DROP TABLE ${schema}.dummy_test_states_parts CASCADE CONSTRAINTS
DROP TABLE ${schema}.dummy_test_states CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_attchments_contracts CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_attachments_contracts CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_attachments CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_checkpoints CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_transactions CASCADE CONSTRAINTS


@@ -4,7 +4,7 @@ DROP TABLE IF EXISTS ${schema}.cp_states_v2_participants;
DROP TABLE IF EXISTS ${schema}.dummy_linear_state_parts;
DROP TABLE IF EXISTS ${schema}.dummy_linear_states_v2_parts;
DROP TABLE IF EXISTS ${schema}.dummy_deal_states_parts;
DROP TABLE IF EXISTS ${schema}.node_attchments_contracts;
DROP TABLE IF EXISTS ${schema}.node_attachments_contracts;
DROP TABLE IF EXISTS ${schema}.node_attachments;
DROP TABLE IF EXISTS ${schema}.node_checkpoints;
DROP TABLE IF EXISTS ${schema}.node_transactions;


@@ -4,7 +4,7 @@ DROP TABLE IF EXISTS ${schema}.cp_states_v2_participants;
DROP TABLE IF EXISTS ${schema}.dummy_linear_state_parts;
DROP TABLE IF EXISTS ${schema}.dummy_linear_states_v2_parts;
DROP TABLE IF EXISTS ${schema}.dummy_deal_states_parts;
DROP TABLE IF EXISTS ${schema}.node_attchments_contracts;
DROP TABLE IF EXISTS ${schema}.node_attachments_contracts;
DROP TABLE IF EXISTS ${schema}.node_attachments;
DROP TABLE IF EXISTS ${schema}.node_checkpoints;
DROP TABLE IF EXISTS ${schema}.node_transactions;


@@ -4,7 +4,7 @@ DROP TABLE IF EXISTS ${schema}.cp_states_v2_participants;
DROP TABLE IF EXISTS ${schema}.dummy_linear_state_parts;
DROP TABLE IF EXISTS ${schema}.dummy_linear_states_v2_parts;
DROP TABLE IF EXISTS ${schema}.dummy_deal_states_parts;
DROP TABLE IF EXISTS ${schema}.node_attchments_contracts;
DROP TABLE IF EXISTS ${schema}.node_attachments_contracts;
DROP TABLE IF EXISTS ${schema}.node_attachments;
DROP TABLE IF EXISTS ${schema}.node_checkpoints;
DROP TABLE IF EXISTS ${schema}.node_transactions;


@@ -14,6 +14,7 @@ import com.typesafe.config.Config
import net.corda.core.internal.deleteRecursively
import net.corda.core.internal.div
import net.corda.core.utilities.NetworkHostAndPort
import net.corda.nodeapi.internal.config.UnknownConfigKeysPolicy
import net.corda.nodeapi.internal.config.parseAs
import tornadofx.*
import java.io.IOException
@@ -29,7 +30,7 @@ class InstallFactory : Controller() {
require(nodeController.isPortValid(port)) { "Invalid port $port" }
}
val nodeConfig = config.parseAs<NodeConfig>()
val nodeConfig = config.parseAs<NodeConfig>(UnknownConfigKeysPolicy.IGNORE::handle)
nodeConfig.p2pAddress.checkPort()
nodeConfig.rpcAddress.checkPort()
nodeConfig.webAddress.checkPort()


@@ -43,7 +43,10 @@ data class NodeConfig(
val issuableCurrencies: List<String> = emptyList(),
/** Pass-through for generating node.conf with external DB */
val dataSourceProperties: Properties? = null,
val database: Properties? = null
val database: Properties? = null,
private val devMode: Boolean = true,
private val detectPublicIp: Boolean = false,
private val useTestClock: Boolean = true
) {
companion object {
val renderOptions: ConfigRenderOptions = ConfigRenderOptions.defaults().setOriginComments(false)
@@ -51,14 +54,9 @@
const val cordappDirName = "cordapps"
}
@Suppress("unused")
private val detectPublicIp = false
@Suppress("unused")
private val useTestClock = true
fun nodeConf(): Config {
val basic = NodeConfigurationData(myLegalName, p2pAddress, rpcAddress, notary, h2port, rpcUsers, useTestClock, detectPublicIp).toConfig()
val basic = NodeConfigurationData(myLegalName, p2pAddress, rpcAddress, notary, h2port, rpcUsers, useTestClock, detectPublicIp, devMode).toConfig()
val rpcSettings = empty()
.withValue("address", ConfigValueFactory.fromAnyRef(rpcAddress.toString()))
.withValue("adminAddress", ConfigValueFactory.fromAnyRef(rpcAdminAddress.toString()))
@@ -88,7 +86,8 @@ private data class NodeConfigurationData(
val h2port: Int,
val rpcUsers: List<User> = listOf(NodeConfig.defaultUser),
val useTestClock: Boolean,
val detectPublicIp: Boolean
val detectPublicIp: Boolean,
val devMode: Boolean
)
private data class WebServerConfigurationData(


@@ -23,6 +23,7 @@ import javafx.scene.layout.HBox
import javafx.scene.layout.StackPane
import javafx.scene.layout.VBox
import javafx.util.Duration
import net.corda.client.rpc.RPCException
import net.corda.core.concurrent.match
import net.corda.core.contracts.ContractState
import net.corda.core.messaging.CordaRPCOps
@@ -211,13 +212,25 @@ class NodeTerminalView : Fragment() {
}
val fxScheduler = Schedulers.from(Platform::runLater)
subscriptions.add(txNext.observeOn(fxScheduler).subscribe {
subscriptions.add(txNext.observeOn(fxScheduler).subscribe({
transactions.value = (++txCount).toString()
})
subscriptions.add(stateNext.observeOn(fxScheduler).subscribe {
}, { error ->
if (error is RPCException && error.message?.contains("Connection failure detected") == true) {
// Ignore this ^^^, it only happens when we shut down a node in Demobench.
} else {
throw error
}
}))
subscriptions.add(stateNext.observeOn(fxScheduler).subscribe({
stateCount += (it.produced.size - it.consumed.size)
states.value = stateCount.toString()
})
}, { error ->
if (error is RPCException && error.message?.contains("Connection failure detected") == true) {
// Ignore this ^^^, it only happens when we shut down a node in Demobench.
} else {
throw error
}
}))
} catch (e: Exception) {
log.log(Level.WARNING, "RPC failed: ${e.message}", e)
}