Oracle 11xe and 12c database compatibility (#206)

* SQL setup scripts for Oracle and datasource configuration.
* CashSelection implementation for Oracle.
* Workaround for DISTINCT being disallowed on BLOB columns.
* ojdbc8.jar driver dependency taken from the Oracle Maven repository; ojdbc6.jar taken from lib (it is not available in a Maven repository).
* Allow a random port in node names and trim it from the schema name; remove schemas with port numbers from the DB setup (to cater for new tests; non-Oracle-specific change).
* Removed unnecessary code from ConfigUtilities (non-Oracle-specific change).
* Removed the DB integration test setup from RPCStabilityTests as it doesn't start any nodes.
szymonsztuka 2018-01-29 18:35:58 +00:00 committed by GitHub
parent da5d0ab806
commit 1ecf646f7e
56 changed files with 403 additions and 157 deletions

View File

@ -206,6 +206,16 @@ allprojects {
         mavenCentral()
         jcenter()
         maven { url 'https://jitpack.io' }
+        maven {
+            // For integrationTest task when running against Oracle database the JDBC driver is in Oracle Maven repository with login access only,
+            // setup an account on https://login.oracle.com/oaam_server/login.do
+            // provide credentials to Gradle task by -PmavenOracleUsername=... -PmavenOraclePassword=...
+            url "https://www.oracle.com/content/secure/maven/content"
+            credentials {
+                username = project.findProperty("mavenOracleUsername") ?: ""
+                password = project.findProperty("mavenOraclePassword") ?: ""
+            }
+        }
     }
     configurations {
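The credentials block above is read with project.findProperty, so besides the -P switches mentioned in the comment the values can also be supplied through a gradle.properties file; a minimal sketch, with placeholder values for an assumed Oracle SSO account:

    # gradle.properties (Gradle user home or project root) -- placeholder values
    mavenOracleUsername=first.last@example.com
    mavenOraclePassword=changeMe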

View File

@ -33,7 +33,7 @@ import net.corda.testing.driver.driver
 import net.corda.testing.node.User
 import net.corda.testing.internal.IntegrationTest
 import net.corda.testing.internal.IntegrationTestSchemas
-import net.corda.testing.internal.toDatabaseSchemaNames
+import net.corda.testing.internal.toDatabaseSchemaName
 import org.junit.ClassRule
 import org.junit.Test
 import rx.Observable
@ -57,7 +57,7 @@ class NodeMonitorModelTest : IntegrationTest() {
     companion object {
         @ClassRule @JvmField
         val databaseSchemas = IntegrationTestSchemas(*listOf(ALICE_NAME, BOB_NAME, CHARLIE_NAME, DUMMY_NOTARY_NAME)
-                .map { it.toDatabaseSchemaNames("", "_10000", "_10003") }.flatten().toTypedArray())
+                .map { it.toDatabaseSchemaName() }.toTypedArray())
     }
     private fun setup(runTest: () -> Unit) {

View File

@ -12,20 +12,13 @@ import net.corda.core.serialization.serialize
 import net.corda.core.utilities.*
 import net.corda.node.services.messaging.RPCServerConfiguration
 import net.corda.nodeapi.RPCApi
-import net.corda.testing.core.ALICE_NAME
-import net.corda.testing.core.BOB_NAME
-import net.corda.testing.core.DUMMY_BANK_A_NAME
 import net.corda.testing.core.SerializationEnvironmentRule
-import net.corda.testing.internal.IntegrationTest
-import net.corda.testing.internal.IntegrationTestSchemas
 import net.corda.testing.internal.testThreadFactory
-import net.corda.testing.internal.toDatabaseSchemaNames
 import net.corda.testing.node.internal.*
 import org.apache.activemq.artemis.api.core.SimpleString
 import org.junit.After
 import org.junit.Assert.assertEquals
 import org.junit.Assert.assertTrue
-import org.junit.ClassRule
 import org.junit.Rule
 import org.junit.Test
 import rx.Observable
@ -38,7 +31,7 @@ import java.util.concurrent.ScheduledExecutorService
 import java.util.concurrent.TimeUnit
 import java.util.concurrent.atomic.AtomicInteger
-class RPCStabilityTests : IntegrationTest() {
+class RPCStabilityTests {
     @Rule
     @JvmField
     val testSerialization = SerializationEnvironmentRule(true)
@ -48,12 +41,6 @@ class RPCStabilityTests : IntegrationTest() {
         pool.shutdown()
     }
-    companion object {
-        @ClassRule @JvmField
-        val databaseSchemas = IntegrationTestSchemas(*listOf(ALICE_NAME, BOB_NAME, DUMMY_BANK_A_NAME)
-                .map { it.toDatabaseSchemaNames("", "_10000", "_10003", "_10012") }.flatten().toTypedArray())
-    }
     object DummyOps : RPCOps {
         override val protocolVersion = 0
     }

View File

@ -191,6 +191,8 @@ R3 Corda 3.0 Developer Preview
 * Enterprise Corda only: Compatibility with PostgreSQL 9.6 database.
+* Enterprise Corda only: Compatibility with Oracle 11g RC2 and 12c database.
 * Move to a message based control of peer to peer bridge formation to allow for future out of process bridging components.
   This removes the legacy Artemis bridges completely, so the ``useAMQPBridges`` configuration property has been removed.

View File

@ -26,7 +26,8 @@ import kotlin.test.assertEquals
 class IntegrationTestingTutorial : IntegrationTest() {
     companion object {
         @ClassRule @JvmField
-        val databaseSchemas = IntegrationTestSchemas(*listOf(ALICE_NAME, BOB_NAME, DUMMY_NOTARY_NAME).map { it.toDatabaseSchemaName() }.toTypedArray())
+        val databaseSchemas = IntegrationTestSchemas(ALICE_NAME.toDatabaseSchemaName(), BOB_NAME.toDatabaseSchemaName(),
+                DUMMY_NOTARY_NAME.toDatabaseSchemaName())
     }
     @Test

View File

@ -63,6 +63,29 @@ Example node configuration for SQL Azure:
         }
         jarDirs = [PATH_TO_JDBC_DRIVER_DIR]
+
+Oracle
+````````````````````````
+Corda supports Oracle 11g RC2 and Oracle 12c with the ojdbc6.jar driver.
+The minimum transaction isolation level ``database.transactionIsolationLevel`` is 'READ_COMMITTED'.
+The property ``database.schema`` is optional.
+
+Example node configuration for Oracle:
+
+.. sourcecode:: none
+
+    dataSourceProperties = {
+        dataSourceClassName = "oracle.jdbc.pool.OracleDataSource"
+        dataSource.url = "jdbc:oracle:thin:@[IP]:[PORT]:xe"
+        dataSource.user = [USER]
+        dataSource.password = [PASSWORD]
+    }
+    database = {
+        transactionIsolationLevel = READ_COMMITTED
+        schema = [SCHEMA]
+    }
+    jarDirs = [PATH_TO_JDBC_DRIVER_DIR]
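For reference, the Oracle schema/user that the node connects with has to exist before the node starts; a minimal sketch, mirroring the Oracle test setup script added elsewhere in this change (user name, password and granted privileges are placeholders to adapt):

.. sourcecode:: sql

    CREATE USER my_corda_node IDENTIFIED BY my_password;
    GRANT UNLIMITED TABLESPACE TO my_corda_node;
    GRANT CREATE SESSION TO my_corda_node;
    GRANT CREATE TABLE TO my_corda_node;
    GRANT CREATE SEQUENCE TO my_corda_node;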
+.. _postgres_ref:
+
 PostgreSQL
 ````````````````````````
 Corda has been tested on PostgreSQL 9.6 database, using PostgreSQL JDBC Driver 42.1.4.

View File

@ -10,7 +10,7 @@ import net.corda.testing.core.DUMMY_BANK_A_NAME
 import net.corda.testing.driver.driver
 import net.corda.testing.internal.IntegrationTest
 import net.corda.testing.internal.IntegrationTestSchemas
-import net.corda.testing.internal.toDatabaseSchemaNames
+import net.corda.testing.internal.toDatabaseSchemaName
 import org.assertj.core.api.Assertions.assertThat
 import org.junit.ClassRule
 import org.junit.Test
@ -18,8 +18,8 @@ import org.junit.Test
 class CashConfigDataFlowTest : IntegrationTest() {
     companion object {
         @ClassRule @JvmField
-        val databaseSchemas = IntegrationTestSchemas(*listOf(ALICE_NAME, BOB_NAME, DUMMY_BANK_A_NAME)
-                .map { it.toDatabaseSchemaNames("", "_10000", "_10003") }.flatten().toTypedArray())
+        val databaseSchemas = IntegrationTestSchemas(ALICE_NAME.toDatabaseSchemaName(), BOB_NAME.toDatabaseSchemaName(),
+                DUMMY_BANK_A_NAME.toDatabaseSchemaName())
     }
     @Test
     fun `issuable currencies are read in from node config`() {

View File

@ -0,0 +1,82 @@
package net.corda.finance.contracts.asset.cash.selection

import net.corda.core.contracts.Amount
import net.corda.core.crypto.toStringShort
import net.corda.core.identity.AbstractParty
import net.corda.core.identity.Party
import net.corda.core.utilities.*
import java.sql.Connection
import java.sql.DatabaseMetaData
import java.sql.ResultSet
import java.util.*

class CashSelectionOracleImpl : AbstractCashSelection(maxRetries = 16, retrySleep = 1000, retryCap = 5000) {
    companion object {
        val JDBC_DRIVER_NAME = "Oracle JDBC driver"
        private val log = contextLogger()
    }

    override fun isCompatible(metaData: DatabaseMetaData): Boolean {
        return metaData.driverName == JDBC_DRIVER_NAME
    }

    override fun toString() = "${this::class.java} for $JDBC_DRIVER_NAME"

    override fun executeQuery(connection: Connection, amount: Amount<Currency>, lockId: UUID, notary: Party?,
                              onlyFromIssuerParties: Set<AbstractParty>, withIssuerRefs: Set<OpaqueBytes>, withResultSet: (ResultSet) -> Boolean): Boolean {
        val selectJoin = """
            WITH entry(transaction_id, output_index, pennies, total, lock_id) AS
            (
              SELECT vs.transaction_id, vs.output_index, ccs.pennies,
                     SUM(ccs.pennies) OVER (ORDER BY ccs.transaction_id), vs.lock_id
              FROM contract_cash_states ccs, vault_states vs
              WHERE vs.transaction_id = ccs.transaction_id AND vs.output_index = ccs.output_index
              AND vs.state_status = 0
              AND ccs.ccy_code = ?
              AND (vs.lock_id = ? OR vs.lock_id is null)
            """ +
                (if (notary != null)
                    " AND vs.notary_name = ?" else "") +
                (if (onlyFromIssuerParties.isNotEmpty())
                    " AND ccs.issuer_key IN (?)" else "") +
                (if (withIssuerRefs.isNotEmpty())
                    " AND ccs.issuer_ref IN (?)" else "") +
                """)
            SELECT transaction_id, output_index, pennies, total, lock_id
            FROM entry where total <= ? + pennies"""

        // Use prepared statement for protection against SQL Injection (http://www.h2database.com/html/advanced.html#sql_injection)
        connection.prepareStatement(selectJoin).use { statement ->
            var pIndex = 0
            statement.setString(++pIndex, amount.token.currencyCode)
            statement.setString(++pIndex, lockId.toString())
            if (notary != null)
                statement.setString(++pIndex, notary.name.toString())
            if (onlyFromIssuerParties.isNotEmpty())
                statement.setObject(++pIndex, onlyFromIssuerParties.map { it.owningKey.toStringShort() as Any }.toTypedArray())
            if (withIssuerRefs.isNotEmpty())
                statement.setObject(++pIndex, withIssuerRefs.map { it.bytes.toHexString() as Any }.toTypedArray())
            statement.setLong(++pIndex, amount.quantity)
            // https://stackoverflow.com/questions/2683214/get-query-from-java-sql-preparedstatement
            log.trace {
                """$selectJoin
                   Prepared statement parameter values:
                   ccy_code = ${amount.token.currencyCode}
                   lock_id = $lockId
                """ +
                        (if (notary != null) "notary = ${notary.name}" else "") +
                        (if (onlyFromIssuerParties.isNotEmpty()) "issuer_key IN ${onlyFromIssuerParties.map { it.owningKey.toStringShort() as Any }.toTypedArray()}" else "") +
                        (if (withIssuerRefs.isNotEmpty()) "issuer_ref IN ${withIssuerRefs.map { it.bytes.toHexString() as Any }.toTypedArray()}" else "") +
                        "total <= ${amount.quantity}"
            }
            statement.executeQuery().use { rs ->
                return withResultSet(rs)
            }
        }
    }
}

View File

@ -2,3 +2,4 @@ net.corda.finance.contracts.asset.cash.selection.CashSelectionH2Impl
 net.corda.finance.contracts.asset.cash.selection.CashSelectionMySQLImpl
 net.corda.finance.contracts.asset.cash.selection.CashSelectionPostgreSQLImpl
 net.corda.finance.contracts.asset.cash.selection.CashSelectionSQLServerImpl
+net.corda.finance.contracts.asset.cash.selection.CashSelectionOracleImpl
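This file is a java.util.ServiceLoader registration, so the new Oracle implementation is discovered at runtime alongside the existing ones. A sketch of how a caller could pick the matching implementation by probing isCompatible against the JDBC metadata; the actual lookup is done inside AbstractCashSelection, so the function below is purely illustrative:

    import java.sql.Connection
    import java.util.ServiceLoader
    import net.corda.finance.contracts.asset.cash.selection.AbstractCashSelection

    // Illustrative only: AbstractCashSelection performs the real ServiceLoader lookup internally.
    fun cashSelectionFor(connection: Connection): AbstractCashSelection =
            ServiceLoader.load(AbstractCashSelection::class.java)
                    .firstOrNull { it.isCompatible(connection.metaData) }
                    ?: throw IllegalStateException("No cash selection implementation for ${connection.metaData.driverName}")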

View File

@ -46,8 +46,7 @@ class NodeRegistrationTest : IntegrationTest() {
     private val aliceName = CordaX500Name("Alice", "London", "GB")
     private val genevieveName = CordaX500Name("Genevieve", "London", "GB")
-    @ClassRule
-    @JvmField
+    @ClassRule @JvmField
     val databaseSchemas = IntegrationTestSchemas(notaryName.organisation, aliceName.organisation, genevieveName.organisation)
     private val timeoutMillis = 5.seconds.toMillis()

View File

@ -39,9 +39,9 @@ class HibernateConfiguration(
             // to avoid OOM when large blobs might get logged.
             applyBasicType(CordaMaterializedBlobType, CordaMaterializedBlobType.name)
             applyBasicType(CordaWrapperBinaryType, CordaWrapperBinaryType.name)
-            // When connecting to SqlServer (and only then) do we need to tell hibernate to use
+            // When connecting to SqlServer or Oracle, do we need to tell hibernate to use
             // nationalised (i.e. Unicode) strings by default
-            val forceUnicodeForSqlServer = jdbcUrl.contains(":sqlserver:", ignoreCase = true)
+            val forceUnicodeForSqlServer = listOf(":oracle:", ":sqlserver:").any { jdbcUrl.contains(it, ignoreCase = true) }
             enableGlobalNationalizedCharacterDataSupport(forceUnicodeForSqlServer)
             return build()
         }

View File

@ -51,13 +51,6 @@ processTestResources {
     from file("$rootDir/config/test/jolokia-access.xml")
 }
-// Map DB provider to driver artifact imported as runtime dependency in integration tests.
-def jdbcRuntimeDependency = [
-        'integration-sql-server': "com.microsoft.sqlserver:mssql-jdbc:6.2.1.jre8",
-        'integration-azure-sql' : "com.microsoft.sqlserver:mssql-jdbc:6.2.1.jre8",
-        'integration-postgres'  : "org.postgresql:postgresql:${postgresql_version}"
-]
 // To find potential version conflicts, run "gradle htmlDependencyReport" and then look in
 // build/reports/project/dependencies/index.html for green highlighted parts of the tree.
@ -196,15 +189,36 @@ dependencies {
     testCompile "org.glassfish.jersey.containers:jersey-container-servlet-core:${jersey_version}"
     testCompile "org.glassfish.jersey.containers:jersey-container-jetty-http:${jersey_version}"
-    // Add runtime-only dependency on the JDBC driver for the specified DB provider
+    // Add runtime-only dependency on the JDBC driver for the specified DB provider (used in database integration tests)
     def DB_PROVIDER = System.getProperty("databaseProvider")
-    if (DB_PROVIDER != null) {
-        final driverDependency = jdbcRuntimeDependency[DB_PROVIDER]
-        if (driverDependency != null) {
-            runtime driverDependency
-        } else {
-            throw new GradleException('Unsupported DB provider: ' + DB_PROVIDER)
-        }
-    }
+    switch (DB_PROVIDER) {
+        case null: //DB provider not provided, use default H2 driver (already in the classpath)
+            break
+        case "integration-sql-server" :
+            runtime "com.microsoft.sqlserver:mssql-jdbc:6.2.1.jre8"
+            break
+        case "integration-azure-sql" :
+            runtime "com.microsoft.sqlserver:mssql-jdbc:6.2.1.jre8"
+            break
+        case "integration-oracle" :
+            runtime ("com.oracle.jdbc:ojdbc8:12.2.0.1") { //exclude unnecessary or conflicting libraries
+                exclude group: "com.oracle.jdbc", module: "ucp"
+                exclude group: "com.oracle.jdbc", module: "ons"
+                exclude group: "com.oracle.jdbc", module: "xmlparserv2"
+                exclude group: "com.oracle.jdbc", module: "xdb6"
+                exclude group: "com.oracle.jdbc", module: "oraclepki"
+                exclude group: "com.oracle.jdbc", module: "osdt_cert"
+                exclude group: "com.oracle.jdbc", module: "osdt_core"
+            }
+            break
+        case "integration-oracle-11" :
+            runtime files("lib/ojdbc6.jar")
+            break
+        case "integration-postgres" :
+            runtime "org.postgresql:postgresql:${postgresql_version}"
+            break
+        default:
+            throw new GradleException('Unsupported DB provider: ' + DB_PROVIDER)
+    }
     // Jolokia JVM monitoring agent
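Taken together with the Oracle repository credentials added to the root build.gradle, an Oracle database integration test run could be invoked roughly as below; the exact task path and the forwarding of the databaseProvider property to the test JVM are assumptions about the surrounding build, not shown in this diff:

    # hypothetical invocation -- adjust task path and credentials
    ./gradlew node:integrationTest -DdatabaseProvider=integration-oracle \
        -PmavenOracleUsername=first.last@example.com -PmavenOraclePassword=changeMe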

BIN
node/lib/ojdbc6.jar Normal file

Binary file not shown.

View File

@ -17,7 +17,6 @@ import net.corda.testing.driver.driver
 import net.corda.testing.internal.IntegrationTest
 import net.corda.testing.internal.IntegrationTestSchemas
 import net.corda.testing.internal.toDatabaseSchemaName
-import net.corda.testing.internal.toDatabaseSchemaNames
 import net.corda.testing.node.User
 import org.assertj.core.api.Assertions.assertThat
 import org.assertj.core.api.Assertions.assertThatThrownBy
@ -30,9 +29,8 @@ import kotlin.test.assertEquals
 class BootTests : IntegrationTest() {
     companion object {
         @ClassRule @JvmField
-        val databaseSchemas = IntegrationTestSchemas(*listOf(ALICE_NAME, BOB_NAME, DUMMY_BANK_A_NAME)
-                .map { it.toDatabaseSchemaNames("", "_10000", "_10003") }.flatten().toTypedArray()
-                + DUMMY_NOTARY_NAME.toDatabaseSchemaName())
+        val databaseSchemas = IntegrationTestSchemas(*listOf(ALICE_NAME, BOB_NAME, DUMMY_BANK_A_NAME, DUMMY_NOTARY_NAME)
+                .map { it.toDatabaseSchemaName() }.toTypedArray())
     }
     @Test

View File

@ -4,6 +4,7 @@ import com.google.common.base.Stopwatch
 import net.corda.testing.core.ALICE_NAME
 import net.corda.testing.core.BOB_NAME
 import net.corda.testing.core.DUMMY_BANK_A_NAME
+import net.corda.testing.core.DUMMY_NOTARY_NAME
 import net.corda.testing.driver.driver
 import net.corda.testing.internal.IntegrationTest
 import net.corda.testing.internal.IntegrationTestSchemas
@ -18,7 +19,8 @@ import java.util.concurrent.TimeUnit
 class NodeStartupPerformanceTests : IntegrationTest() {
     companion object {
         @ClassRule @JvmField
-        val databaseSchemas = IntegrationTestSchemas(*listOf(ALICE_NAME, BOB_NAME, DUMMY_BANK_A_NAME).map { it.toDatabaseSchemaName() }.toTypedArray())
+        val databaseSchemas = IntegrationTestSchemas(*listOf(ALICE_NAME, BOB_NAME, DUMMY_BANK_A_NAME, DUMMY_NOTARY_NAME)
+                .map { it.toDatabaseSchemaName() }.toTypedArray())
     }
     // Measure the startup time of nodes. Note that this includes an RPC roundtrip, which causes e.g. Kryo initialisation.
     @Test

View File

@ -7,14 +7,19 @@ import net.corda.core.utilities.contextLogger
 import net.corda.core.utilities.getOrThrow
 import net.corda.testing.core.DUMMY_BANK_A_NAME
 import net.corda.testing.driver.driver
+import net.corda.testing.internal.IntegrationTest
+import net.corda.testing.internal.IntegrationTestSchemas
+import net.corda.testing.internal.toDatabaseSchemaName
 import org.junit.Assert
+import org.junit.ClassRule
 import org.junit.Test
 import java.util.concurrent.CountDownLatch
 import java.util.concurrent.TimeUnit
-class NodeUnloadHandlerTests {
+class NodeUnloadHandlerTests : IntegrationTest() {
     companion object {
+        @ClassRule @JvmField
+        val databaseSchemas = IntegrationTestSchemas(DUMMY_BANK_A_NAME.toDatabaseSchemaName())
         val latch = CountDownLatch(1)
     }

View File

@ -13,6 +13,7 @@ import net.corda.core.utilities.getOrThrow
 import net.corda.core.utilities.unwrap
 import net.corda.node.services.Permissions.Companion.startFlow
 import net.corda.testing.core.ALICE_NAME
+import net.corda.testing.core.DUMMY_NOTARY_NAME
 import net.corda.testing.driver.driver
 import net.corda.testing.internal.IntegrationTest
 import net.corda.testing.internal.IntegrationTestSchemas
@ -30,7 +31,7 @@ import kotlin.test.fail
 class SSHServerTest : IntegrationTest() {
     companion object {
         @ClassRule @JvmField
-        val databaseSchemas = IntegrationTestSchemas(ALICE_NAME.toDatabaseSchemaName())
+        val databaseSchemas = IntegrationTestSchemas(ALICE_NAME.toDatabaseSchemaName(), DUMMY_NOTARY_NAME.toDatabaseSchemaName())
     }
     @Test()

View File

@ -46,8 +46,8 @@ class AttachmentLoadingTests : IntegrationTest() {
     private companion object {
         @ClassRule @JvmField
-        val databaseSchemas = IntegrationTestSchemas(*listOf(DUMMY_BANK_A_NAME, DUMMY_BANK_B_NAME, DUMMY_NOTARY_NAME)
-                .map { it.toDatabaseSchemaName() }.toTypedArray())
+        val databaseSchemas = IntegrationTestSchemas(DUMMY_BANK_A_NAME.toDatabaseSchemaName(), DUMMY_BANK_B_NAME.toDatabaseSchemaName(),
+                DUMMY_NOTARY_NAME.toDatabaseSchemaName())
         private val logger = contextLogger()
         val isolatedJAR = AttachmentLoadingTests::class.java.getResource("isolated.jar")!!

View File

@ -26,7 +26,6 @@ import net.corda.testing.node.User
 import net.corda.testing.node.internal.DummyClusterSpec
 import org.assertj.core.api.Assertions.assertThat
 import org.junit.ClassRule
-import org.junit.Ignore
 import org.junit.Test
 import rx.Observable
 import java.util.*

View File

@ -11,7 +11,6 @@ import net.corda.core.internal.concurrent.map
 import net.corda.core.transactions.TransactionBuilder
 import net.corda.core.utilities.getOrThrow
 import net.corda.node.internal.StartedNode
-import net.corda.testing.*
 import net.corda.testing.core.DUMMY_BANK_A_NAME
 import net.corda.testing.core.chooseIdentity
 import net.corda.testing.contracts.DummyContract
@ -25,7 +24,6 @@ import net.corda.testing.node.ClusterSpec
 import net.corda.testing.node.NotarySpec
 import net.corda.testing.node.startFlow
 import org.junit.ClassRule
-import org.junit.Ignore
 import org.junit.Test
 import java.util.*
 import kotlin.test.assertEquals

View File

@ -7,7 +7,6 @@ import net.corda.core.node.NodeInfo
 import net.corda.core.utilities.NetworkHostAndPort
 import net.corda.node.internal.Node
 import net.corda.node.internal.StartedNode
-import net.corda.testing.*
 import net.corda.testing.internal.IntegrationTestSchemas
 import net.corda.testing.internal.toDatabaseSchemaName
 import net.corda.testing.core.*
@ -25,7 +24,8 @@ class PersistentNetworkMapCacheTest : NodeBasedTest() {
         val DUMMY_REGULATOR = TestIdentity(CordaX500Name("Regulator A", "Paris", "FR"), 100).party
         @ClassRule @JvmField
-        val databaseSchemas = IntegrationTestSchemas(*listOf(DUMMY_REGULATOR.name, ALICE.name, BOB.name).map { it.toDatabaseSchemaName() }.toTypedArray())
+        val databaseSchemas = IntegrationTestSchemas(DUMMY_REGULATOR.name.toDatabaseSchemaName(), ALICE.name.toDatabaseSchemaName(),
+                BOB.name.toDatabaseSchemaName())
     }
     private val partiesList = listOf(DUMMY_REGULATOR, ALICE, BOB)
     private val addressesMap = HashMap<CordaX500Name, NetworkHostAndPort>()

View File

@ -5,14 +5,25 @@ import net.corda.core.utilities.getOrThrow
 import net.corda.node.services.Permissions.Companion.all
 import net.corda.node.testsupport.withCertificates
 import net.corda.node.testsupport.withKeyStores
+import net.corda.testing.core.ALICE_NAME
+import net.corda.testing.core.BOB_NAME
+import net.corda.testing.core.DUMMY_BANK_A_NAME
+import net.corda.testing.core.DUMMY_NOTARY_NAME
 import net.corda.testing.driver.PortAllocation
 import net.corda.testing.driver.driver
-import net.corda.testing.internal.useSslRpcOverrides
+import net.corda.testing.internal.*
 import net.corda.testing.node.User
 import org.assertj.core.api.Assertions.assertThat
+import org.junit.ClassRule
 import org.junit.Test
-class RpcSslTest {
+class RpcSslTest : IntegrationTest() {
+    companion object {
+        @ClassRule @JvmField
+        val databaseSchemas = IntegrationTestSchemas(*listOf(ALICE_NAME, BOB_NAME, DUMMY_BANK_A_NAME, DUMMY_NOTARY_NAME)
+                .map { it.toDatabaseSchemaName() }.toTypedArray())
+    }
     @Test
     fun rpc_client_using_ssl() {
         val user = User("mark", "dadada", setOf(all()))

View File

@ -7,7 +7,6 @@ import net.corda.core.flows.InitiatingFlow
 import net.corda.core.identity.Party
 import net.corda.core.utilities.getOrThrow
 import net.corda.core.utilities.unwrap
-import net.corda.testing.*
 import net.corda.testing.internal.IntegrationTestSchemas
 import net.corda.testing.internal.toDatabaseSchemaName
 import net.corda.testing.core.chooseIdentity

View File

@ -30,7 +30,8 @@ class LargeTransactionsTest : IntegrationTest() {
         val BOB = TestIdentity(BOB_NAME, 80).party
         val DUMMY_NOTARY = TestIdentity(DUMMY_NOTARY_NAME, 20).party
         @ClassRule @JvmField
-        val databaseSchemas = IntegrationTestSchemas(*listOf(ALICE_NAME, BOB_NAME, DUMMY_NOTARY_NAME).map { it.toDatabaseSchemaName() }.toTypedArray())
+        val databaseSchemas = IntegrationTestSchemas(ALICE_NAME.toDatabaseSchemaName(), BOB_NAME.toDatabaseSchemaName(),
+                DUMMY_NOTARY_NAME.toDatabaseSchemaName())
     }
     @StartableByRPC

View File

@ -2,10 +2,17 @@ package net.corda.test.node
 import net.corda.core.utilities.getOrThrow
 import net.corda.testing.core.ALICE_NAME
+import net.corda.testing.internal.IntegrationTestSchemas
+import net.corda.testing.internal.toDatabaseSchemaName
 import net.corda.testing.node.internal.NodeBasedTest
+import org.junit.ClassRule
 import org.junit.Test
 class NodeStartAndStopTest : NodeBasedTest() {
+    companion object {
+        @ClassRule @JvmField
+        val databaseSchemas = IntegrationTestSchemas(ALICE_NAME.toDatabaseSchemaName())
+    }
     @Test
     fun `start stop start`() {

View File

@ -25,7 +25,6 @@ import net.corda.testing.driver.driver
 import net.corda.testing.internal.IntegrationTest
 import net.corda.testing.internal.IntegrationTestSchemas
 import net.corda.testing.internal.toDatabaseSchemaName
-import net.corda.testing.internal.toDatabaseSchemaNames
 import net.corda.testing.node.User
 import org.junit.Assume.assumeFalse
 import org.junit.ClassRule
@ -40,9 +39,8 @@ import kotlin.test.assertNotNull
 class NodeStatePersistenceTests : IntegrationTest() {
     companion object {
         @ClassRule @JvmField
-        val databaseSchemas = IntegrationTestSchemas(*listOf(ALICE_NAME, BOB_NAME, DUMMY_BANK_A_NAME)
-                .map { it.toDatabaseSchemaNames("", "_10000", "_10003", "_10006") }.flatten().toTypedArray(),
-                DUMMY_NOTARY_NAME.toDatabaseSchemaName())
+        val databaseSchemas = IntegrationTestSchemas(*listOf(ALICE_NAME, BOB_NAME, DUMMY_BANK_A_NAME, DUMMY_NOTARY_NAME)
+                .map { it.toDatabaseSchemaName() }.toTypedArray())
     }
     @Test
     fun `persistent state survives node restart`() {

View File

@ -9,7 +9,6 @@ import net.corda.core.internal.createDirectories
 import net.corda.core.internal.div
 import net.corda.core.internal.exists
 import net.corda.nodeapi.internal.config.SSLConfiguration
-import net.corda.nodeapi.internal.config.toProperties
 import net.corda.nodeapi.internal.createDevKeyStores
 import net.corda.nodeapi.internal.crypto.X509KeyStore
 import net.corda.nodeapi.internal.crypto.loadKeyStore
@ -31,23 +30,12 @@ object ConfigHelper {
         val appConfig = ConfigFactory.parseFile(configFile.toFile(), parseOptions.setAllowMissing(allowMissingConfig))
         val databaseConfig = ConfigFactory.parseResources(System.getProperty("databaseProvider")+".conf", parseOptions.setAllowMissing(true))
-        //typesafe workaround: a system property with placeholder is passed as value (inside quotes),
-        //undo adding the quotes for a fixed placeholder ${nodeOrganizationName}
-        //https://github.com/lightbend/config/issues/265
-        var systemUnquotedPlaceholders: Config = ConfigFactory.empty()
-        ConfigFactory.systemProperties().toProperties().forEach { name, value ->
-            if (value.toString().contains("\${nodeOrganizationName}")) {
-                var unquotedPlaceholder = "\"" + value.toString().replace("\${nodeOrganizationName}","\"\${nodeOrganizationName}\"") + "\""
-                systemUnquotedPlaceholders = systemUnquotedPlaceholders.withFallback(ConfigFactory.parseString(name.toString() + " = " + unquotedPlaceholder))
-            }
-        }
         val finalConfig = configOverrides
                 // Add substitution values here
-                .withFallback(systemUnquotedPlaceholders)
-                .withFallback(configOf("nodeOrganizationName" to baseDirectory.fileName.toString().replace(" ","").replace("-","_")))
-                .withFallback(ConfigFactory.systemProperties())
-                .withFallback( configOf("baseDirectory" to baseDirectory.toString()))
-                .withFallback(databaseConfig)
+                .withFallback(configOf("nodeOrganizationName" to parseToDbSchemaFriendlyName(baseDirectory.fileName.toString()))) //for database integration tests
+                .withFallback(ConfigFactory.systemProperties()) //for database integration tests
+                .withFallback(configOf("baseDirectory" to baseDirectory.toString()))
+                .withFallback(databaseConfig) //for database integration tests
                 .withFallback(appConfig)
                 .withFallback(defaultConfig)
                 .resolve()
@ -88,3 +76,6 @@ fun SSLConfiguration.configureDevKeyAndTrustStores(myLegalName: CordaX500Name) {
         }
     }
 }
+/** Parse a value to be database schema name friendly and removes the last part if it matches a port ("_" followed by at least 5 digits) */
+fun parseToDbSchemaFriendlyName(value: String) =
+        value.replace(" ", "").replace("-", "_").replace(Regex("_\\d{5,}$"),"")
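The effect of parseToDbSchemaFriendlyName is to strip spaces, turn hyphens into underscores, and drop a trailing port suffix, so node directory names generated with random ports map back to a stable schema name; an illustrative check (input values are made up):

    // e.g. in a Kotlin scratch file or test -- example values only
    parseToDbSchemaFriendlyName("Bank A-10012")   // == "BankA"      ('-' becomes '_', then the trailing _10012 port suffix is removed)
    parseToDbSchemaFriendlyName("Alice Corp")     // == "AliceCorp"  (no 5-digit port suffix, nothing trimmed)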

View File

@ -274,9 +274,10 @@ open class PersistentNetworkMapCache(
     private fun queryIdentityByLegalName(session: Session, name: CordaX500Name): PartyAndCertificate? {
         val query = session.createQuery(
                 // We do the JOIN here to restrict results to those present in the network map
-                "SELECT DISTINCT l FROM ${NodeInfoSchemaV1.PersistentNodeInfo::class.java.name} n JOIN n.legalIdentitiesAndCerts l WHERE l.name = :name",
+                "SELECT l FROM ${NodeInfoSchemaV1.PersistentNodeInfo::class.java.name} n JOIN n.legalIdentitiesAndCerts l WHERE l.name = :name",
                 NodeInfoSchemaV1.DBPartyAndCertificate::class.java)
         query.setParameter("name", name.toString())
+        query.maxResults = 1 // instead of DISTINCT in the query, DISTINCT is not supported in Oracle when one of the columns is BLOB
         val candidates = query.resultList.map { it.toLegalIdentityAndCert() }
         // The map is restricted to holding a single identity for any X.500 name, so firstOrNull() is correct here.
         return candidates.firstOrNull()

View File

@ -16,5 +16,8 @@
     <changeSet author="R3.Corda" id="1511451595465-1.3" dbms="postgresql">
         <createSequence sequenceName="hibernate_sequence" minValue="1"/>
     </changeSet>
+    <changeSet author="tudormalene (generated)" id="1511451595465-1.0" dbms="oracle">
+        <createSequence sequenceName="hibernate_sequence" minValue="1"/>
+    </changeSet>
 </databaseChangeLog>

View File

@ -115,8 +115,7 @@
             <column name="PID" type="NVARCHAR(255)"/>
             <column name="MUTUAL_EXCLUSION_TIMESTAMP" type="timestamp"/>
         </createTable>
-        <addPrimaryKey columnNames="mutual_exclusion_id" constraintName="node_mutual_exclusion_pkey"
-                       tableName="node_mutual_exclusion"/>
+        <addPrimaryKey columnNames="mutual_exclusion_id" constraintName="node_mutual_exclusion_pkey" tableName="node_mutual_exclusion"/>
     </changeSet>
     <changeSet author="R3.Corda" id="1511451595465-30">
         <addPrimaryKey columnNames="att_id" constraintName="node_attachments_pkey" tableName="node_attachments"/>
@ -163,6 +162,11 @@
                                  referencedColumnNames="party_name" referencedTableName="node_info_party_cert"/>
     </changeSet>
     <changeSet author="R3.Corda" id="add index att_id">
+        <preConditions onFail="MARK_RAN">
+            <not>
+                <dbms type="oracle"/>
+            </not>
+        </preConditions>
         <createIndex tableName="node_attachments" indexName="att_id_idx">
             <column name="att_id"/>
         </createIndex>

View File

@ -41,6 +41,7 @@ import java.time.LocalDate
 import java.time.ZoneOffset
 import java.time.temporal.ChronoUnit
 import java.util.*
+import kotlin.test.assertTrue
 open class VaultQueryTests {
     private companion object {

View File

@ -36,6 +36,7 @@ object CommercialPaperSchemaV1 : MappedSchema(schemaFamily = CommercialPaperSche
                 var issuancePartyHash: String,
                 @Column(name = "issuance_ref")
+                @Type(type = "corda-wrapper-binary")
                 var issuanceRef: ByteArray,
                 @Column(name = "owner_key_hash", length = MAX_HASH_HEX_SIZE)

View File

@ -45,8 +45,8 @@ class IRSDemoTest : IntegrationTest() {
         private val log = contextLogger()
         @ClassRule @JvmField
-        val databaseSchemas = IntegrationTestSchemas(*listOf(DUMMY_BANK_A_NAME, DUMMY_BANK_B_NAME, DUMMY_NOTARY_NAME)
-                .map { it.toDatabaseSchemaName() }.toTypedArray())
+        val databaseSchemas = IntegrationTestSchemas(DUMMY_BANK_A_NAME.toDatabaseSchemaName(), DUMMY_BANK_B_NAME.toDatabaseSchemaName(),
+                DUMMY_NOTARY_NAME.toDatabaseSchemaName())
     }
     private val rpcUsers = listOf(User("user", "password", setOf("ALL")))

View File

@ -45,8 +45,8 @@ class DriverTests : IntegrationTest() {
         }
         @ClassRule @JvmField
-        val databaseSchemas = IntegrationTestSchemas(*listOf(DUMMY_BANK_A_NAME, DUMMY_NOTARY_NAME, DUMMY_REGULATOR_NAME)
-                .map { it.toDatabaseSchemaName() }.toTypedArray())
+        val databaseSchemas = IntegrationTestSchemas(DUMMY_BANK_A_NAME.toDatabaseSchemaName(), DUMMY_NOTARY_NAME.toDatabaseSchemaName(),
+                DUMMY_REGULATOR_NAME.toDatabaseSchemaName())
     }
     @Test

View File

@ -28,6 +28,7 @@ import net.corda.node.services.api.SchemaService
 import net.corda.node.services.api.VaultServiceInternal
 import net.corda.node.services.api.WritableTransactionStorage
 import net.corda.node.services.config.configOf
+import net.corda.node.services.config.parseToDbSchemaFriendlyName
 import net.corda.node.services.identity.InMemoryIdentityService
 import net.corda.node.services.keys.freshCertificate
 import net.corda.node.services.keys.getSigner
@ -360,12 +361,11 @@ fun databaseProviderDataSourceConfig(nodeName: String? = null, postfix: String?
     val fixedOverride = ConfigFactory.parseString("baseDirectory = \"\"")
     //implied property nodeOrganizationName to fill the potential placeholders in db schema/ db user properties
-    val standardizedNodeName = nodeName?.replace(" ", "")?.replace("-", "_")
-    val nodeOrganizationNameConfig = if (standardizedNodeName != null) configOf("nodeOrganizationName" to standardizedNodeName) else ConfigFactory.empty()
+    val nodeOrganizationNameConfig = if (nodeName != null) configOf("nodeOrganizationName" to parseToDbSchemaFriendlyName(nodeName)) else ConfigFactory.empty()
     //defaults to H2
     //for H2 the same db instance runs for all integration tests, so adding additional variable postfix create a unique database each time
-    val defaultConfig = inMemoryH2DataSourceConfig(standardizedNodeName, postfix)
+    val defaultConfig = inMemoryH2DataSourceConfig(nodeName, postfix)
     return systemConfigOverride.withFallback(databaseConfig)
             .withFallback(fixedOverride)

View File

@ -7,7 +7,6 @@ import net.corda.testing.database.DatabaseConstants.DATA_SOURCE_CLASSNAME
 import net.corda.testing.database.DatabaseConstants.DATA_SOURCE_PASSWORD
 import net.corda.testing.database.DatabaseConstants.DATA_SOURCE_URL
 import net.corda.testing.database.DatabaseConstants.DATA_SOURCE_USER
-import org.apache.commons.logging.LogFactory
 import org.springframework.core.io.ClassPathResource
 import org.springframework.core.io.support.EncodedResource
 import org.springframework.jdbc.datasource.DriverManagerDataSource
@ -19,7 +18,7 @@ import java.sql.SQLWarning
 import java.util.*
 object DbScriptRunner {
-    private val log = loggerFor<DbScriptRunner>()
+    private val logger = loggerFor<DbScriptRunner>()
     // System properties set in main 'corda-project' build.gradle
     private const val TEST_DB_ADMIN_USER = "test.db.admin.user"
@ -50,10 +49,10 @@ object DbScriptRunner {
             val encodedResource = EncodedResource(initSchema)
             val inputString = encodedResource.inputStream.bufferedReader().use { it.readText().split("\n") }
             val resolvedScripts = merge(inputString, databaseSchemas)
-            log.info("Executing DB Script for schemas $databaseSchemas with ${resolvedScripts.size} statements.")
+            logger.info("Executing DB Script for schemas $databaseSchemas with ${resolvedScripts.size} statements.")
             DatabasePopulatorUtils.execute(ListPopulator(false, true, resolvedScripts),
                     createDataSource(dbProvider))
-        } else log.warn("DB Script missing: $initSchema")
+        } else logger.warn("DB Script missing: $initSchema")
     }
 }
@ -68,15 +67,11 @@ object DbScriptRunner {
 class ListPopulator(private val continueOnError: Boolean,
                     private val ignoreFailedDrops: Boolean,
                     private val statements: List<String>) : DatabasePopulator {
-    private val logger = LogFactory.getLog(ScriptUtils::class.java)
+    private val logger = loggerFor<DbScriptRunner>()
     override fun populate(connection: Connection) {
         try {
-            if (logger.isInfoEnabled) {
-                logger.info("Executing SQL script " )
-            }
+            logger.info("Executing SQL script")
             val startTime = System.currentTimeMillis()
-            val resource = statements.toString().substring(0,30) + " [...]"
             var stmtNumber = 0
             val stmt = connection.createStatement()
             try {
@ -97,9 +92,14 @@ class ListPopulator(private val continueOnError: Boolean,
                     }
                 } catch (ex: SQLException) {
                     val dropStatement = StringUtils.startsWithIgnoreCase(statement.trim { it <= ' ' }, "drop")
-                    if (continueOnError || dropStatement && ignoreFailedDrops) {
-                        if (logger.isDebugEnabled) {
-                            logger.debug(ex)
+                    if ((continueOnError || dropStatement && ignoreFailedDrops)) {
+                        val dropUserStatement = StringUtils.startsWithIgnoreCase(statement.trim { it <= ' ' }, "drop user ")
+                        if (dropUserStatement) { // log to help spotting a node still logged on database after test has finished (happens on Oracle db)
+                            logger.warn("SQLException for $statement: SQL state '" + ex.sqlState +
+                                    "', error code '" + ex.errorCode +
+                                    "', message [" + ex.message + "]")
+                        } else {
+                            logger.debug("SQLException for $statement", ex)
                         }
                     } else {
                         throw ex
@ -113,12 +113,9 @@ class ListPopulator(private val continueOnError: Boolean,
                 logger.debug("Could not close JDBC Statement", ex)
             }
         }
             val elapsedTime = System.currentTimeMillis() - startTime
-            if (logger.isInfoEnabled) {
-                logger.info("Executed SQL script from $resource in $elapsedTime ms.")
-            }
-            logger.info("Executed SQL script $resource" )
+            val resource = if (statements.isNotEmpty()) statements[0] + " [...]" else ""
+            logger.info("Executed ${statements.size} SQL statements ($resource) in $elapsedTime ms.")
         } catch (ex: Exception) {
             if (ex is ScriptException) {
                 throw ex

View File

@ -16,7 +16,7 @@ object HttpUtils {
     private val client by lazy {
         OkHttpClient.Builder()
                 .connectTimeout(5, TimeUnit.SECONDS)
-                .readTimeout(60, TimeUnit.SECONDS).build()
+                .readTimeout(90, TimeUnit.SECONDS).build()
     }
     val defaultMapper: ObjectMapper by lazy {

View File

@ -32,7 +32,7 @@ object DummyLinearStateSchemaV1 : MappedSchema(schemaFamily = DummyLinearStateSc
                 /** X500Name of participant parties **/
                 @ElementCollection
                 @Column(name = "participants")
-                @CollectionTable(name = "dummy_linear_state_participants", joinColumns = arrayOf(
+                @CollectionTable(name = "dummy_linear_state_parts", joinColumns = arrayOf(
                         JoinColumn(name = "output_index", referencedColumnName = "output_index"),
                         JoinColumn(name = "transaction_id", referencedColumnName = "transaction_id")))
                 var participants: MutableSet<AbstractParty>,

View File

@ -1,9 +1,9 @@
 DROP TABLE IF EXISTS ${schema}.cash_state_participants;
 DROP TABLE IF EXISTS ${schema}.cash_states_v2_participants;
 DROP TABLE IF EXISTS ${schema}.cp_states_v2_participants;
-DROP TABLE IF EXISTS ${schema}.dummy_linear_state_participants;
-DROP TABLE IF EXISTS ${schema}.dummy_linear_states_v2_participants;
-DROP TABLE IF EXISTS ${schema}.dummy_deal_states_participants;
+DROP TABLE IF EXISTS ${schema}.dummy_linear_state_parts;
+DROP TABLE IF EXISTS ${schema}.dummy_linear_states_v2_parts;
+DROP TABLE IF EXISTS ${schema}.dummy_deal_states_parts;
 DROP TABLE IF EXISTS ${schema}.node_attachments;
 DROP TABLE IF EXISTS ${schema}.node_checkpoints;
 DROP TABLE IF EXISTS ${schema}.node_transactions;

View File

@ -1,9 +1,9 @@
 DROP TABLE IF EXISTS ${schema}.cash_state_participants;
 DROP TABLE IF EXISTS ${schema}.cash_states_v2_participants;
 DROP TABLE IF EXISTS ${schema}.cp_states_v2_participants;
-DROP TABLE IF EXISTS ${schema}.dummy_linear_state_participants;
-DROP TABLE IF EXISTS ${schema}.dummy_linear_states_v2_participants;
-DROP TABLE IF EXISTS ${schema}.dummy_deal_states_participants;
+DROP TABLE IF EXISTS ${schema}.dummy_linear_state_parts;
+DROP TABLE IF EXISTS ${schema}.dummy_linear_states_v2_parts;
+DROP TABLE IF EXISTS ${schema}.dummy_deal_states_parts;
 DROP TABLE IF EXISTS ${schema}.node_attachments;
 DROP TABLE IF EXISTS ${schema}.node_checkpoints;
 DROP TABLE IF EXISTS ${schema}.node_transactions;

View File

@ -1,37 +1,17 @@
 --once off script to run against master database (not a user database)
 CREATE LOGIN Alice WITH PASSWORD = 'yourStrong(!)Password';
 CREATE LOGIN AliceCorp WITH PASSWORD = 'yourStrong(!)Password';
-CREATE LOGIN AliceCorp_10000 WITH PASSWORD = 'yourStrong(!)Password';
-CREATE LOGIN AliceCorp_10003 WITH PASSWORD = 'yourStrong(!)Password';
-CREATE LOGIN AliceCorp_10006 WITH PASSWORD = 'yourStrong(!)Password';
-CREATE LOGIN AliceCorp_10012 WITH PASSWORD = 'yourStrong(!)Password';
 CREATE LOGIN BankA WITH PASSWORD = 'yourStrong(!)Password';
-CREATE LOGIN BankA_10000 WITH PASSWORD = 'yourStrong(!)Password';
-CREATE LOGIN BankA_10003 WITH PASSWORD = 'yourStrong(!)Password';
-CREATE LOGIN BankA_10006 WITH PASSWORD = 'yourStrong(!)Password';
-CREATE LOGIN BankA_10012 WITH PASSWORD = 'yourStrong(!)Password';
 CREATE LOGIN BankB WITH PASSWORD = 'yourStrong(!)Password';
 CREATE LOGIN BankOfCorda WITH PASSWORD = 'yourStrong(!)Password';
 CREATE LOGIN BigCorporation WITH PASSWORD = 'yourStrong(!)Password';
 CREATE LOGIN BobPlc WITH PASSWORD = 'yourStrong(!)Password';
-CREATE LOGIN BobPlc_10000 WITH PASSWORD = 'yourStrong(!)Password';
-CREATE LOGIN BobPlc_10003 WITH PASSWORD = 'yourStrong(!)Password';
-CREATE LOGIN BobPlc_10006 WITH PASSWORD = 'yourStrong(!)Password';
-CREATE LOGIN BobPlc_10012 WITH PASSWORD = 'yourStrong(!)Password';
 CREATE LOGIN CharlieLtd WITH PASSWORD = 'yourStrong(!)Password';
-CREATE LOGIN CharlieLtd_10000 WITH PASSWORD = 'yourStrong(!)Password';
-CREATE LOGIN CharlieLtd_10003 WITH PASSWORD = 'yourStrong(!)Password';
-CREATE LOGIN CharlieLtd_10006 WITH PASSWORD = 'yourStrong(!)Password';
 CREATE LOGIN DistributedService_0 WITH PASSWORD = 'yourStrong(!)Password';
 CREATE LOGIN DistributedService_1 WITH PASSWORD = 'yourStrong(!)Password';
 CREATE LOGIN NetworkMapService WITH PASSWORD = 'yourStrong(!)Password';
 CREATE LOGIN Notary WITH PASSWORD = 'yourStrong(!)Password';
 CREATE LOGIN NotaryService WITH PASSWORD = 'yourStrong(!)Password';
-CREATE LOGIN NotaryService_10000 WITH PASSWORD = 'yourStrong(!)Password';
-CREATE LOGIN NotaryService_10003 WITH PASSWORD = 'yourStrong(!)Password';
-CREATE LOGIN NotaryService0 WITH PASSWORD = 'yourStrong(!)Password';
-CREATE LOGIN NotaryService1 WITH PASSWORD = 'yourStrong(!)Password';
-CREATE LOGIN NotaryService2 WITH PASSWORD = 'yourStrong(!)Password';
 CREATE LOGIN NotaryService_0 WITH PASSWORD = 'yourStrong(!)Password';
 CREATE LOGIN NotaryService_1 WITH PASSWORD = 'yourStrong(!)Password';
 CREATE LOGIN NotaryService_2 WITH PASSWORD = 'yourStrong(!)Password';

View File

@ -0,0 +1,51 @@
DROP TABLE ${schema}.cash_state_participants CASCADE CONSTRAINTS
DROP TABLE ${schema}.cash_states_v2_participants CASCADE CONSTRAINTS
DROP TABLE ${schema}.cp_states_v2_participants CASCADE CONSTRAINTS
DROP TABLE ${schema}.dummy_linear_state_parts CASCADE CONSTRAINTS
DROP TABLE ${schema}.dummy_linear_states_v2_parts CASCADE CONSTRAINTS
DROP TABLE ${schema}.dummy_deal_states_parts CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_attachments CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_checkpoints CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_transactions CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_message_retry CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_message_ids CASCADE CONSTRAINTS
DROP TABLE ${schema}.vault_states CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_our_key_pairs CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_scheduled_states CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_network_map_nodes CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_network_map_subscribers CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_notary_commit_log CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_transaction_mappings CASCADE CONSTRAINTS
DROP TABLE ${schema}.vault_fungible_states_parts CASCADE CONSTRAINTS
DROP TABLE ${schema}.vault_linear_states_parts CASCADE CONSTRAINTS
DROP TABLE ${schema}.vault_fungible_states CASCADE CONSTRAINTS
DROP TABLE ${schema}.vault_linear_states CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_bft_committed_states CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_raft_committed_states CASCADE CONSTRAINTS
DROP TABLE ${schema}.vault_transaction_notes CASCADE CONSTRAINTS
DROP TABLE ${schema}.link_nodeinfo_party CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_link_nodeinfo_party CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_info_party_cert CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_info_hosts CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_infos CASCADE CONSTRAINTS
DROP TABLE ${schema}.cp_states CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_contract_upgrades CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_identities CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_named_identities CASCADE CONSTRAINTS
DROP TABLE ${schema}.children CASCADE CONSTRAINTS
DROP TABLE ${schema}.parents CASCADE CONSTRAINTS
DROP TABLE ${schema}.contract_cash_states CASCADE CONSTRAINTS
DROP TABLE ${schema}.contract_cash_states_v1 CASCADE CONSTRAINTS
DROP TABLE ${schema}.messages CASCADE CONSTRAINTS
DROP TABLE ${schema}.state_participants CASCADE CONSTRAINTS
DROP TABLE ${schema}.cash_states_v2 CASCADE CONSTRAINTS
DROP TABLE ${schema}.cash_states_v3 CASCADE CONSTRAINTS
DROP TABLE ${schema}.cp_states_v1 CASCADE CONSTRAINTS
DROP TABLE ${schema}.cp_states_v2 CASCADE CONSTRAINTS
DROP TABLE ${schema}.dummy_deal_states CASCADE CONSTRAINTS
DROP TABLE ${schema}.dummy_linear_states CASCADE CONSTRAINTS
DROP TABLE ${schema}.dummy_linear_states_v2 CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_mutual_exclusion CASCADE CONSTRAINTS
DROP TABLE ${schema}.DATABASECHANGELOG CASCADE CONSTRAINTS
DROP TABLE ${schema}.DATABASECHANGELOGLOCK CASCADE CONSTRAINTS
DROP SEQUENCE ${schema}.hibernate_sequence
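
The statements above use a ${schema} placeholder, so the script cannot be run verbatim: the integration-test setup substitutes the target schema name before execution. As a minimal, hypothetical sketch (the helper below is not part of this commit), the substituted script could be applied over JDBC roughly like this, tolerating "object does not exist" errors so the teardown stays idempotent:

import java.sql.DriverManager
import java.sql.SQLException

// Hypothetical helper: substitute the ${schema} placeholder and execute each
// DROP statement, ignoring ORA-00942 (table does not exist) and ORA-02289
// (sequence does not exist) so that a fresh schema does not fail the teardown.
fun runTearDown(scriptText: String, schema: String, url: String, user: String, password: String) {
    val statements = scriptText.replace("\${schema}", schema)
            .lines()
            .map { it.trim() }
            .filter { it.isNotEmpty() }
    DriverManager.getConnection(url, user, password).use { connection ->
        connection.createStatement().use { statement ->
            for (sql in statements) {
                try {
                    statement.execute(sql)
                } catch (e: SQLException) {
                    if (e.errorCode != 942 && e.errorCode != 2289) throw e
                }
            }
        }
    }
}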

@ -0,0 +1 @@
DROP USER ${schema} CASCADE

@ -0,0 +1,7 @@
DROP USER ${schema} CASCADE
CREATE USER ${schema} IDENTIFIED BY 1234
GRANT UNLIMITED TABLESPACE TO ${schema}
GRANT CREATE SESSION TO ${schema}
GRANT CREATE TABLE TO ${schema}
GRANT CREATE SEQUENCE TO ${schema}
GRANT ALL PRIVILEGES TO ${schema} IDENTIFIED BY 1234
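
Here too ${schema} is a template variable: every test node gets its own Oracle user, and the user name doubles as the schema name. Purely as an illustration (the function, URL and administrative credentials below are invented, not part of this commit), the same statements could be issued programmatically through the driver class that the datasource templates further down reference:

import java.sql.SQLException
import oracle.jdbc.pool.OracleDataSource

// Hypothetical illustration: recreate a per-node test user/schema while connected
// as an administrative account. Host, port and credentials are placeholders only.
fun recreateTestUser(schema: String) {
    val dataSource = OracleDataSource()
    dataSource.setURL("jdbc:oracle:thin:@localhost:1521:xe")
    dataSource.user = "system"
    dataSource.setPassword("adminPassword")
    dataSource.connection.use { connection ->
        connection.createStatement().use { statement ->
            try {
                statement.execute("DROP USER $schema CASCADE")
            } catch (e: SQLException) {
                if (e.errorCode != 1918) throw e  // ORA-01918: user does not exist
            }
            statement.execute("CREATE USER $schema IDENTIFIED BY 1234")
            statement.execute("GRANT UNLIMITED TABLESPACE TO $schema")
            statement.execute("GRANT CREATE SESSION TO $schema")
            statement.execute("GRANT CREATE TABLE TO $schema")
            statement.execute("GRANT CREATE SEQUENCE TO $schema")
        }
    }
}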

@ -0,0 +1,51 @@
DROP TABLE ${schema}.cash_state_participants CASCADE CONSTRAINTS
DROP TABLE ${schema}.cash_states_v2_participants CASCADE CONSTRAINTS
DROP TABLE ${schema}.cp_states_v2_participants CASCADE CONSTRAINTS
DROP TABLE ${schema}.dummy_linear_state_parts CASCADE CONSTRAINTS
DROP TABLE ${schema}.dummy_linear_states_v2_parts CASCADE CONSTRAINTS
DROP TABLE ${schema}.dummy_deal_states_parts CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_attachments CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_checkpoints CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_transactions CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_message_retry CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_message_ids CASCADE CONSTRAINTS
DROP TABLE ${schema}.vault_states CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_our_key_pairs CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_scheduled_states CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_network_map_nodes CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_network_map_subscribers CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_notary_commit_log CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_transaction_mappings CASCADE CONSTRAINTS
DROP TABLE ${schema}.vault_fungible_states_parts CASCADE CONSTRAINTS
DROP TABLE ${schema}.vault_linear_states_parts CASCADE CONSTRAINTS
DROP TABLE ${schema}.vault_fungible_states CASCADE CONSTRAINTS
DROP TABLE ${schema}.vault_linear_states CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_bft_committed_states CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_raft_committed_states CASCADE CONSTRAINTS
DROP TABLE ${schema}.vault_transaction_notes CASCADE CONSTRAINTS
DROP TABLE ${schema}.link_nodeinfo_party CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_link_nodeinfo_party CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_info_party_cert CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_info_hosts CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_infos CASCADE CONSTRAINTS
DROP TABLE ${schema}.cp_states CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_contract_upgrades CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_identities CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_named_identities CASCADE CONSTRAINTS
DROP TABLE ${schema}.children CASCADE CONSTRAINTS
DROP TABLE ${schema}.parents CASCADE CONSTRAINTS
DROP TABLE ${schema}.contract_cash_states CASCADE CONSTRAINTS
DROP TABLE ${schema}.contract_cash_states_v1 CASCADE CONSTRAINTS
DROP TABLE ${schema}.messages CASCADE CONSTRAINTS
DROP TABLE ${schema}.state_participants CASCADE CONSTRAINTS
DROP TABLE ${schema}.cash_states_v2 CASCADE CONSTRAINTS
DROP TABLE ${schema}.cash_states_v3 CASCADE CONSTRAINTS
DROP TABLE ${schema}.cp_states_v1 CASCADE CONSTRAINTS
DROP TABLE ${schema}.cp_states_v2 CASCADE CONSTRAINTS
DROP TABLE ${schema}.dummy_deal_states CASCADE CONSTRAINTS
DROP TABLE ${schema}.dummy_linear_states CASCADE CONSTRAINTS
DROP TABLE ${schema}.dummy_linear_states_v2 CASCADE CONSTRAINTS
DROP TABLE ${schema}.node_mutual_exclusion CASCADE CONSTRAINTS
DROP TABLE ${schema}.DATABASECHANGELOG CASCADE CONSTRAINTS
DROP TABLE ${schema}.DATABASECHANGELOGLOCK CASCADE CONSTRAINTS
DROP SEQUENCE ${schema}.hibernate_sequence

@ -1,9 +1,9 @@
DROP TABLE IF EXISTS ${schema}.cash_state_participants;
DROP TABLE IF EXISTS ${schema}.cash_states_v2_participants;
DROP TABLE IF EXISTS ${schema}.cp_states_v2_participants;
-DROP TABLE IF EXISTS ${schema}.dummy_linear_state_participants;
-DROP TABLE IF EXISTS ${schema}.dummy_linear_states_v2_participants;
-DROP TABLE IF EXISTS ${schema}.dummy_deal_states_participants;
+DROP TABLE IF EXISTS ${schema}.dummy_linear_state_parts;
+DROP TABLE IF EXISTS ${schema}.dummy_linear_states_v2_parts;
+DROP TABLE IF EXISTS ${schema}.dummy_deal_states_parts;
DROP TABLE IF EXISTS ${schema}.node_attachments;
DROP TABLE IF EXISTS ${schema}.node_checkpoints;
DROP TABLE IF EXISTS ${schema}.node_transactions;

@ -1,9 +1,9 @@
DROP TABLE IF EXISTS ${schema}.cash_state_participants;
DROP TABLE IF EXISTS ${schema}.cash_states_v2_participants;
DROP TABLE IF EXISTS ${schema}.cp_states_v2_participants;
-DROP TABLE IF EXISTS ${schema}.dummy_linear_state_participants;
-DROP TABLE IF EXISTS ${schema}.dummy_linear_states_v2_participants;
-DROP TABLE IF EXISTS ${schema}.dummy_deal_states_participants;
+DROP TABLE IF EXISTS ${schema}.dummy_linear_state_parts;
+DROP TABLE IF EXISTS ${schema}.dummy_linear_states_v2_parts;
+DROP TABLE IF EXISTS ${schema}.dummy_deal_states_parts;
DROP TABLE IF EXISTS ${schema}.node_attachments;
DROP TABLE IF EXISTS ${schema}.node_checkpoints;
DROP TABLE IF EXISTS ${schema}.node_transactions;

@ -1,9 +1,9 @@
DROP TABLE IF EXISTS ${schema}.cash_state_participants;
DROP TABLE IF EXISTS ${schema}.cash_states_v2_participants;
DROP TABLE IF EXISTS ${schema}.cp_states_v2_participants;
-DROP TABLE IF EXISTS ${schema}.dummy_linear_state_participants;
-DROP TABLE IF EXISTS ${schema}.dummy_linear_states_v2_participants;
-DROP TABLE IF EXISTS ${schema}.dummy_deal_states_participants;
+DROP TABLE IF EXISTS ${schema}.dummy_linear_state_parts;
+DROP TABLE IF EXISTS ${schema}.dummy_linear_states_v2_parts;
+DROP TABLE IF EXISTS ${schema}.dummy_deal_states_parts;
DROP TABLE IF EXISTS ${schema}.node_attachments;
DROP TABLE IF EXISTS ${schema}.node_checkpoints;
DROP TABLE IF EXISTS ${schema}.node_transactions;

@ -1,9 +1,9 @@
DROP TABLE IF EXISTS ${schema}.cash_state_participants;
DROP TABLE IF EXISTS ${schema}.cash_states_v2_participants;
DROP TABLE IF EXISTS ${schema}.cp_states_v2_participants;
-DROP TABLE IF EXISTS ${schema}.dummy_linear_state_participants;
-DROP TABLE IF EXISTS ${schema}.dummy_linear_states_v2_participants;
-DROP TABLE IF EXISTS ${schema}.dummy_deal_states_participants;
+DROP TABLE IF EXISTS ${schema}.dummy_linear_state_parts;
+DROP TABLE IF EXISTS ${schema}.dummy_linear_states_v2_parts;
+DROP TABLE IF EXISTS ${schema}.dummy_deal_states_parts;
DROP TABLE IF EXISTS ${schema}.node_attachments;
DROP TABLE IF EXISTS ${schema}.node_checkpoints;
DROP TABLE IF EXISTS ${schema}.node_transactions;

@ -1,9 +1,9 @@
DROP TABLE IF EXISTS ${schema}.cash_state_participants;
DROP TABLE IF EXISTS ${schema}.cash_states_v2_participants;
DROP TABLE IF EXISTS ${schema}.cp_states_v2_participants;
-DROP TABLE IF EXISTS ${schema}.dummy_linear_state_participants;
-DROP TABLE IF EXISTS ${schema}.dummy_linear_states_v2_participants;
-DROP TABLE IF EXISTS ${schema}.dummy_deal_states_participants;
+DROP TABLE IF EXISTS ${schema}.dummy_linear_state_parts;
+DROP TABLE IF EXISTS ${schema}.dummy_linear_states_v2_parts;
+DROP TABLE IF EXISTS ${schema}.dummy_deal_states_parts;
DROP TABLE IF EXISTS ${schema}.node_attachments;
DROP TABLE IF EXISTS ${schema}.node_checkpoints;
DROP TABLE IF EXISTS ${schema}.node_transactions;

@ -1,9 +1,9 @@
DROP TABLE IF EXISTS ${schema}.cash_state_participants;
DROP TABLE IF EXISTS ${schema}.cash_states_v2_participants;
DROP TABLE IF EXISTS ${schema}.cp_states_v2_participants;
-DROP TABLE IF EXISTS ${schema}.dummy_linear_state_participants;
-DROP TABLE IF EXISTS ${schema}.dummy_linear_states_v2_participants;
-DROP TABLE IF EXISTS ${schema}.dummy_deal_states_participants;
+DROP TABLE IF EXISTS ${schema}.dummy_linear_state_parts;
+DROP TABLE IF EXISTS ${schema}.dummy_linear_states_v2_parts;
+DROP TABLE IF EXISTS ${schema}.dummy_deal_states_parts;
DROP TABLE IF EXISTS ${schema}.node_attachments;
DROP TABLE IF EXISTS ${schema}.node_checkpoints;
DROP TABLE IF EXISTS ${schema}.node_transactions;

@ -0,0 +1,10 @@
dataSourceProperties = {
dataSourceClassName = "oracle.jdbc.pool.OracleDataSource"
dataSource.url = "jdbc:oracle:thin:@[IP]:[PORT]:xe"
dataSource.user = ${nodeOrganizationName}
dataSource.password = 1234
}
database = {
transactionIsolationLevel = READ_COMMITTED
schema = ${nodeOrganizationName}
}

@ -0,0 +1,10 @@
dataSourceProperties = {
dataSourceClassName = "oracle.jdbc.pool.OracleDataSource"
dataSource.url = "jdbc:oracle:thin:@[IP]:[PORT]:xe"
dataSource.user = ${nodeOrganizationName}
dataSource.password = 1234
}
database = {
transactionIsolationLevel = READ_COMMITTED
schema = ${nodeOrganizationName}
}
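
Both datasource templates rely on HOCON substitution: ${nodeOrganizationName} is resolved when the node configuration is parsed, so the database user and the schema follow the node's organisation name. A rough sketch of how that substitution behaves with the Typesafe Config library (the resource name and organisation value are invented for the example):

import com.typesafe.config.ConfigFactory

// Hypothetical demonstration of the ${nodeOrganizationName} substitution: parse
// the template shown above and supply the organisation name as a fallback value.
fun main() {
    val template = ConfigFactory.parseResources("oracle-datasource.conf")  // invented resource name
    val substitution = ConfigFactory.parseMap(mapOf("nodeOrganizationName" to "ALICE_CORDA"))
    val resolved = template.withFallback(substitution).resolve()
    println(resolved.getString("dataSourceProperties.dataSource.user"))  // ALICE_CORDA
    println(resolved.getString("database.schema"))                       // ALICE_CORDA
}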

@ -21,7 +21,7 @@
<addPrimaryKey columnNames="output_index, transaction_id" constraintName="PK_dummy_deal_states" <addPrimaryKey columnNames="output_index, transaction_id" constraintName="PK_dummy_deal_states"
tableName="dummy_deal_states"/> tableName="dummy_deal_states"/>
<createTable tableName="dummy_deal_states_participants"> <createTable tableName="dummy_deal_states_parts">
<column name="output_index" type="INT"> <column name="output_index" type="INT">
<constraints nullable="false"/> <constraints nullable="false"/>
</column> </column>
@ -32,7 +32,7 @@
</createTable> </createTable>
<addForeignKeyConstraint baseColumnNames="output_index,transaction_id" <addForeignKeyConstraint baseColumnNames="output_index,transaction_id"
baseTableName="dummy_deal_states_participants" baseTableName="dummy_deal_states_parts"
constraintName="FKg6ab1fhx6aesnmhlo62o4dnrt" constraintName="FKg6ab1fhx6aesnmhlo62o4dnrt"
referencedColumnNames="output_index,transaction_id" referencedColumnNames="output_index,transaction_id"
referencedTableName="dummy_deal_states"/> referencedTableName="dummy_deal_states"/>

@ -23,7 +23,7 @@
<addPrimaryKey columnNames="output_index, transaction_id" constraintName="CONSTRAINT_6" <addPrimaryKey columnNames="output_index, transaction_id" constraintName="CONSTRAINT_6"
tableName="dummy_linear_states"/> tableName="dummy_linear_states"/>
<createTable tableName="dummy_linear_state_participants"> <createTable tableName="dummy_linear_state_parts">
<column name="output_index" type="INT"> <column name="output_index" type="INT">
<constraints nullable="false"/> <constraints nullable="false"/>
</column> </column>
@ -34,7 +34,7 @@
</createTable> </createTable>
<addForeignKeyConstraint baseColumnNames="output_index,transaction_id" <addForeignKeyConstraint baseColumnNames="output_index,transaction_id"
baseTableName="dummy_linear_state_participants" baseTableName="dummy_linear_state_parts"
constraintName="FK8y6k4thqq0udg07ojffpbgblq" constraintName="FK8y6k4thqq0udg07ojffpbgblq"
referencedColumnNames="output_index,transaction_id" referencedColumnNames="output_index,transaction_id"
referencedTableName="dummy_linear_states"/> referencedTableName="dummy_linear_states"/>

@ -24,7 +24,7 @@
<addPrimaryKey columnNames="output_index, transaction_id" constraintName="CONSTRAINT_E" <addPrimaryKey columnNames="output_index, transaction_id" constraintName="CONSTRAINT_E"
tableName="dummy_linear_states_v2"/> tableName="dummy_linear_states_v2"/>
<createTable tableName="dummy_linear_states_v2_participants"> <createTable tableName="dummy_linear_states_v2_parts">
<column name="output_index" type="INT"> <column name="output_index" type="INT">
<constraints nullable="false"/> <constraints nullable="false"/>
</column> </column>
@ -35,7 +35,7 @@
</createTable> </createTable>
<addForeignKeyConstraint baseColumnNames="output_index,transaction_id" <addForeignKeyConstraint baseColumnNames="output_index,transaction_id"
baseTableName="dummy_linear_states_v2_participants" baseTableName="dummy_linear_states_v2_parts"
constraintName="FKlfb5dvtmuadsllckeghmjlckk" constraintName="FKlfb5dvtmuadsllckeghmjlckk"
referencedColumnNames="output_index,transaction_id" referencedColumnNames="output_index,transaction_id"
referencedTableName="dummy_linear_states_v2"/> referencedTableName="dummy_linear_states_v2"/>