Mirror of https://github.com/corda/corda.git (synced 2024-12-18 20:47:57 +00:00)

Merge branch 'master' into shams-master-merge-081217

# Conflicts:
#	node/src/main/kotlin/net/corda/node/services/config/NodeConfiguration.kt
#	testing/node-driver/src/main/kotlin/net/corda/testing/driver/Driver.kt
#	testing/node-driver/src/main/kotlin/net/corda/testing/internal/DriverDSLImpl.kt
#	testing/node-driver/src/main/kotlin/net/corda/testing/internal/RPCDriver.kt
#	testing/node-driver/src/main/kotlin/net/corda/testing/internal/demorun/DemoRunner.kt
#	verifier/src/integration-test/kotlin/net/corda/verifier/VerifierDriver.kt

This commit is contained in: d6054e4b4f
@@ -24,7 +24,7 @@ buildscript {
ext.jackson_version = '2.9.2'
ext.jetty_version = '9.4.7.v20170914'
ext.jersey_version = '2.25'
ext.jolokia_version = '2.0.0-M3'
ext.jolokia_version = '1.3.7'
ext.assertj_version = '3.8.0'
ext.slf4j_version = '1.7.25'
ext.log4j_version = '2.9.1'
@@ -49,6 +49,7 @@ buildscript {
ext.beanutils_version = '1.9.3'
ext.crash_version = 'cce5a00f114343c1145c1d7756e1dd6df3ea984e'
ext.jsr305_version = constants.getProperty("jsr305Version")
ext.shiro_version = '1.4.0'
ext.artifactory_plugin_version = constants.getProperty('artifactoryPluginVersion')

// Update 121 is required for ObjectInputFilter and at time of writing 131 was latest:
@@ -66,8 +66,8 @@ class RPCStabilityTests {
val executor = Executors.newScheduledThreadPool(1)
fun startAndStop() {
rpcDriver {
val server = startRpcServer<RPCOps>(ops = DummyOps)
startRpcClient<RPCOps>(server.get().broker.hostAndPort!!).get()
val server = startRpcServer<RPCOps>(ops = DummyOps).get()
startRpcClient<RPCOps>(server.broker.hostAndPort!!).get()
}
}
repeat(5) {
@@ -22,7 +22,10 @@ import net.corda.core.internal.ThreadBox
import net.corda.core.messaging.RPCOps
import net.corda.core.serialization.SerializationContext
import net.corda.core.serialization.serialize
import net.corda.core.utilities.*
import net.corda.core.utilities.Try
import net.corda.core.utilities.contextLogger
import net.corda.core.utilities.debug
import net.corda.core.utilities.getOrThrow
import net.corda.nodeapi.ArtemisConsumer
import net.corda.nodeapi.ArtemisProducer
import net.corda.nodeapi.RPCApi
@@ -1,8 +1,6 @@
package net.corda.client.rpc

import net.corda.core.messaging.CordaRPCOps
import net.corda.core.messaging.RPCOps
import net.corda.node.services.Permissions.Companion.invokeRpc
import net.corda.node.services.messaging.rpcContext
import net.corda.nodeapi.internal.config.User
import net.corda.testing.internal.RPCDriverDSL
@@ -10,15 +8,12 @@ import net.corda.testing.internal.rpcDriver
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.Parameterized
import kotlin.reflect.KVisibility
import kotlin.reflect.full.declaredMemberFunctions
import kotlin.test.assertFailsWith

@RunWith(Parameterized::class)
class RPCPermissionsTests : AbstractRPCTest() {
companion object {
const val DUMMY_FLOW = "StartFlow.net.corda.flows.DummyFlow"
const val OTHER_FLOW = "StartFlow.net.corda.flows.OtherFlow"
const val ALL_ALLOWED = "ALL"
}

@@ -26,12 +21,21 @@ class RPCPermissionsTests : AbstractRPCTest() {
* RPC operation.
*/
interface TestOps : RPCOps {
fun validatePermission(str: String)
fun validatePermission(method: String, target: String? = null)
}

class TestOpsImpl : TestOps {
override val protocolVersion = 1
override fun validatePermission(str: String) { rpcContext().requirePermission(str) }
override fun validatePermission(method: String, target: String?) {
val authorized = if (target == null) {
rpcContext().isPermitted(method)
} else {
rpcContext().isPermitted(method, target)
}
if (!authorized) {
throw PermissionException("RPC user not authorized")
}
}
}

/**
@@ -46,9 +50,9 @@ class RPCPermissionsTests : AbstractRPCTest() {
rpcDriver {
val emptyUser = userOf("empty", emptySet())
val proxy = testProxyFor(emptyUser)
assertFailsWith(PermissionException::class,
"User ${emptyUser.username} should not be allowed to use $DUMMY_FLOW.",
{ proxy.validatePermission(DUMMY_FLOW) })
assertNotAllowed {
proxy.validatePermission("startFlowDynamic", "net.corda.flows.DummyFlow")
}
}
}

@@ -57,7 +61,8 @@ class RPCPermissionsTests : AbstractRPCTest() {
rpcDriver {
val adminUser = userOf("admin", setOf(ALL_ALLOWED))
val proxy = testProxyFor(adminUser)
proxy.validatePermission(DUMMY_FLOW)
proxy.validatePermission("startFlowDynamic", "net.corda.flows.DummyFlow")
proxy.validatePermission("startTrackedFlowDynamic", "net.corda.flows.DummyFlow")
}
}

@@ -66,7 +71,8 @@ class RPCPermissionsTests : AbstractRPCTest() {
rpcDriver {
val joeUser = userOf("joe", setOf(DUMMY_FLOW))
val proxy = testProxyFor(joeUser)
proxy.validatePermission(DUMMY_FLOW)
proxy.validatePermission("startFlowDynamic", "net.corda.flows.DummyFlow")
proxy.validatePermission("startTrackedFlowDynamic", "net.corda.flows.DummyFlow")
}
}

@@ -75,36 +81,46 @@ class RPCPermissionsTests : AbstractRPCTest() {
rpcDriver {
val joeUser = userOf("joe", setOf(DUMMY_FLOW))
val proxy = testProxyFor(joeUser)
assertFailsWith(PermissionException::class,
"User ${joeUser.username} should not be allowed to use $OTHER_FLOW",
{ proxy.validatePermission(OTHER_FLOW) })
}
}

@Test
fun `check ALL is implemented the correct way round`() {
rpcDriver {
val joeUser = userOf("joe", setOf(DUMMY_FLOW))
val proxy = testProxyFor(joeUser)
assertFailsWith(PermissionException::class,
"Permission $ALL_ALLOWED should not do anything for User ${joeUser.username}",
{ proxy.validatePermission(ALL_ALLOWED) })
}
}

@Test
fun `fine grained permissions are enforced`() {
val allPermissions = CordaRPCOps::class.declaredMemberFunctions.filter { it.visibility == KVisibility.PUBLIC }.map { invokeRpc(it) }
allPermissions.forEach { permission ->
rpcDriver {
val user = userOf("Mark", setOf(permission))
val proxy = testProxyFor(user)

proxy.validatePermission(permission)
(allPermissions - permission).forEach { notOwnedPermission ->
assertFailsWith(PermissionException::class, { proxy.validatePermission(notOwnedPermission) })
}
assertNotAllowed {
proxy.validatePermission("startFlowDynamic", "net.corda.flows.OtherFlow")
}
assertNotAllowed {
proxy.validatePermission("startTrackedFlowDynamic", "net.corda.flows.OtherFlow")
}
}
}

@Test
fun `joe user is not allowed to call other RPC methods`() {
rpcDriver {
val joeUser = userOf("joe", setOf(DUMMY_FLOW))
val proxy = testProxyFor(joeUser)
assertNotAllowed {
proxy.validatePermission("nodeInfo")
}
assertNotAllowed {
proxy.validatePermission("networkMapFeed")
}
}
}

@Test
fun `checking invokeRpc permissions entitlements`() {
rpcDriver {
val joeUser = userOf("joe", setOf("InvokeRpc.networkMapFeed"))
val proxy = testProxyFor(joeUser)
assertNotAllowed {
proxy.validatePermission("nodeInfo")
}
assertNotAllowed {
proxy.validatePermission("startTrackedFlowDynamic", "net.corda.flows.OtherFlow")
}
proxy.validatePermission("networkMapFeed")
}
}

private fun assertNotAllowed(action: () -> Unit) {

assertFailsWith(PermissionException::class, "User should not be allowed to perform this action.", action)
}
}
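The tests above exercise permission strings of the form "StartFlow.<flow class>" and "InvokeRpc.<method>". As a rough illustration of how such permissions might be granted to a real RPC user, here is a minimal Kotlin sketch in the same style; the exact shape of the User constructor and of the invokeRpc overload taking a method reference are assumptions inferred from the imports shown above, not code present in this diff.

    import net.corda.core.messaging.CordaRPCOps
    import net.corda.node.services.Permissions.Companion.invokeRpc
    import net.corda.nodeapi.internal.config.User

    // Hypothetical user: may start the dummy flow and call nodeInfo, nothing else.
    val alice = User(
            username = "alice",
            password = "password",
            permissions = setOf(
                    "StartFlow.net.corda.flows.DummyFlow",   // coarse string form, as in DUMMY_FLOW above
                    invokeRpc(CordaRPCOps::nodeInfo)          // fine-grained form built from a method reference
            )
    )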
@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>

<!-- an all powerful policy file -->
<restrict>
<http>
<method>post</method>
@@ -8,23 +8,10 @@

<commands>
<command>read</command>
<command>write</command>
<command>exec</command>
<command>list</command>
<command>search</command>
<command>version</command>
</commands>

<!-- allow anyone to force a garbage collection -->
<allow>
<mbean>
<name>java.lang:type=Memory</name>
<operation>gc</operation>
</mbean>
</allow>

<!-- in case we ever end up using c3pio connection pooling, this example from the docs prevents the password being exported -->
<deny>
<mbean>
<name>com.mchange.v2.c3p0:type=PooledDataSource,*</name>
<attribute>properties</attribute>
</mbean>
</deny>

</restrict>
24  config/prod/jolokia-access.xml  Normal file
@@ -0,0 +1,24 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Jolokia agent and MBean access policy based security -->
<!-- TODO: review these settings before production deployment -->
<restrict>
<!-- IP based restrictions -->
<remote>
<!-- IP address, a host name, or a netmask given in CIDR format (e.g. "10.0.0.0/16" for all clients coming from the 10.0 network). -->
<host>127.0.0.1</host>
<host>localhost</host>
</remote>
<!-- commands for which access is granted: read, write, exec, list, search, version -->
<commands>
<command>version</command>
<command>read</command>
</commands>
<!-- MBean access and deny restrictions -->
<!-- HTTP method restrictions: get, post -->
<http>
<method>get</method>
</http>
<!-- Cross-Origin Resource Sharing (CORS) restrictions
(by default, allow cross origin access from any host)
-->
</restrict>
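To illustrate how a policy file like the one above might be wired in, here is a small Kotlin sketch that launches a node JVM with the Jolokia agent pointed at the prod policy. The agent jar path mirrors the drivers/ copy performed by the cordformation changes later in this diff; the policyLocation agent option is part of Jolokia's own configuration and the exact flags here are assumptions, not something this commit sets up.

    import java.io.File

    fun main() {
        // Assumed paths: the agent fat jar is copied into the node's drivers/ directory,
        // and corda.jar is the node fat jar sitting in the working directory.
        val agentJar = "drivers/jolokia-jvm-1.3.7-agent.jar"
        val policy = File("config/prod/jolokia-access.xml").toURI()
        val command = listOf(
                "java",
                "-javaagent:$agentJar=port=7005,policyLocation=$policy",
                "-jar", "corda.jar"
        )
        ProcessBuilder(command).inheritIO().start()
    }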
Binary file not shown.
@@ -1,15 +1,15 @@
Contract Constraints
====================
API: Contract Constraints
=========================

A basic understanding of contract key concepts, which can be found :doc:`here </key-concepts-contracts>`,
is required reading for this page.

Transaction states specify a constraint over the contract that will be used to verify it. For a transaction to be
valid, the verify() function associated with each state must run successfully. However, for this to be secure, it is
not sufficient to specify the verify() function by name as there may exist multiple different implementations with the
same method signature and enclosing class. Contract constraints solve this problem by allowing a contract developer to
constrain which verify() functions out of the universe of implementations can be used.
(ie the universe is everything that matches the signature and contract constraints restricts this universe to a subset.)
valid, the ``verify`` function associated with each state must run successfully. However, for this to be secure, it is
not sufficient to specify the ``verify`` function by name as there may exist multiple different implementations with
the same method signature and enclosing class. Contract constraints solve this problem by allowing a contract developer
to constrain which ``verify`` functions out of the universe of implementations can be used (i.e. the universe is
everything that matches the signature and contract constraints restricts this universe to a subset).

A typical constraint is the hash of the CorDapp JAR that contains the contract and states but will in future releases
include constraints that require specific signers of the JAR, or both the signer and the hash. Constraints can be
@@ -20,12 +20,13 @@ constructs a ``TransactionState`` without specifying the constraint parameter a
(``AutomaticHashConstraint``) is used. This default will be automatically resolved to a specific
``HashAttachmentConstraint`` that contains the hash of the attachment which contains the contract of that
``TransactionState``. This automatic resolution occurs when a ``TransactionBuilder`` is converted to a
``WireTransaction``. This reduces the boilerplate involved in finding a specific hash constraint when building a transaction.
``WireTransaction``. This reduces the boilerplate involved in finding a specific hash constraint when building a
transaction.

It is possible to specify the constraint explicitly with any other class that implements the ``AttachmentConstraint``
interface. To specify a hash manually the ``HashAttachmentConstraint`` can be used and to not provide any constraint
the ``AlwaysAcceptAttachmentConstraint`` can be used - though this is intended for testing only. An example below
shows how to construct a ``TransactionState`` with an explicitly specified hash constraint from within a flow;
shows how to construct a ``TransactionState`` with an explicitly specified hash constraint from within a flow:

.. sourcecode:: java

@@ -42,12 +43,11 @@ shows how to construct a ``TransactionState`` with an explicitly specified hash
LedgerTransaction ltx = wtx.toLedgerTransaction(serviceHub)
ltx.verify() // Verifies both the attachment constraints and contracts

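The Java snippet referenced above is only partially visible in this hunk. For orientation, a rough Kotlin equivalent of building an output state with an explicit hash constraint might look like the sketch below; the exact TransactionState and HashAttachmentConstraint signatures are assumptions based on the prose above rather than code shown in this diff, and myState, notaryParty, attachmentHash and serviceHub are placeholders from the surrounding flow.

    // Sketch only: pin the contract to a specific attachment hash instead of the AutomaticHashConstraint default.
    val explicitConstraint = HashAttachmentConstraint(attachmentHash)   // hash of the CorDapp JAR containing the contract
    val txState = TransactionState(data = myState, contract = "com.example.MyContract",
            notary = notaryParty, constraint = explicitConstraint)
    val wtx = TransactionBuilder(notary = notaryParty)
            .addOutputState(txState)
            .toWireTransaction(serviceHub)
    val ltx = wtx.toLedgerTransaction(serviceHub)
    ltx.verify()   // verifies both the attachment constraints and the contracts, as above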
This mechanism exists both for integrity and security reasons. It is important not to verify against the wrong contract,
which could happen if the wrong version of the contract is attached. More importantly when resolving transaction chains
there will, in a future release, be attachments loaded from the network into the attachment sandbox that are used
to verify the transaction chain. Ensuring the attachment used is the correct one ensures that the verification will
not be tamperable by providing a fake contract.
to verify the transaction chain. Ensuring the attachment used is the correct one ensures that the verification is
tamper-proof by providing a fake contract.

CorDapps as attachments
-----------------------
@@ -55,10 +55,10 @@ CorDapps as attachments
CorDapp JARs (:doc:`cordapp-overview`) that are installed to the node and contain classes implementing the ``Contract``
interface are automatically loaded into the ``AttachmentStorage`` of a node at startup.

After CorDapps are loaded into the attachment store the node creates a link between contract classes and the
attachment that they were loaded from. This makes it possible to find the attachment for any given contract.
This is how the automatic resolution of attachments is done by the ``TransactionBuilder`` and how, when verifying
the constraints and contracts, attachments are associated with their respective contracts.
After CorDapps are loaded into the attachment store the node creates a link between contract classes and the attachment
that they were loaded from. This makes it possible to find the attachment for any given contract. This is how the
automatic resolution of attachments is done by the ``TransactionBuilder`` and how, when verifying the constraints and
contracts, attachments are associated with their respective contracts.

Implementations
---------------
@@ -95,7 +95,7 @@ to specify JAR URLs in the case that the CorDapp(s) involved in testing already
MockNetwork/MockNode
********************

The most simple way to ensure that a vanilla instance of a MockNode generates the correct CorDapps is to use the
The simplest way to ensure that a vanilla instance of a MockNode generates the correct CorDapps is to use the
``cordappPackages`` constructor parameter (Kotlin) or the ``setCordappPackages`` method on ``MockNetworkParameters`` (Java)
when creating the MockNetwork. This will cause the ``AbstractNode`` to use the named packages as sources for CorDapps. All files
within those packages will be zipped into a JAR and added to the attachment store and loaded as CorDapps by the
@@ -6,6 +6,9 @@ from the previous milestone release.

UNRELEASED
----------
* Exporting additional JMX metrics (artemis, hibernate statistics) and loading Jolokia agent at JVM startup when using
DriverDSL and/or cordformation node runner.

* Removed confusing property database.initDatabase, enabling its guarded behaviour with the dev-mode.
In devMode Hibernate will try to create or update database schemas, otherwise it will expect relevant schemas to be present
in the database (pre configured via DDL scripts or equivalent), and validate these are correct.
@@ -4,6 +4,6 @@ Cheat sheet
A "cheat sheet" summarizing the key Corda types. A PDF version is downloadable `here`_.

.. image:: resources/cheatsheet.jpg
:width: 700px
:width: 700px

.. _`here`: _static/corda-cheat-sheet.pdf
@@ -9,6 +9,7 @@ The following are the core APIs that are used in the development of CorDapps:
api-states
api-persistence
api-contracts
api-contract-constraints
api-vault-query
api-transactions
api-flows
@@ -73,6 +73,7 @@ path to the node's base directory.
:serverNameTablePrefix: Prefix string to apply to all the database tables. The default is no prefix.
:transactionIsolationLevel: Transaction isolation level as defined by the ``TRANSACTION_`` constants in
``java.sql.Connection``, but without the "TRANSACTION_" prefix. Defaults to REPEATABLE_READ.
:exportHibernateJMXStatistics: Whether to export Hibernate JMX statistics (caution: expensive run-time overhead)

:dataSourceProperties: This section is used to configure the jdbc connection and database driver used for the nodes persistence.
Currently the defaults in ``/node/src/main/resources/reference.conf`` are as shown in the first example. This is currently
@@ -163,7 +164,9 @@ path to the node's base directory.
Each should be a string. Only the JARs in the directories are added, not the directories themselves. This is useful
for including JDBC drivers and the like. e.g. ``jarDirs = [ 'lib' ]``

:sshd: If provided, node will start internal SSH server which will provide a management shell. It uses the same credentials
and permissions as RPC subsystem. It has one required parameter.
:sshd: If provided, node will start internal SSH server which will provide a management shell. It uses the same credentials and permissions as RPC subsystem. It has one required parameter.

:port: The port to start SSH server on

:exportJMXTo: If set to ``http``, will enable JMX metrics reporting via the Jolokia HTTP/JSON agent.
Default Jolokia access url is http://127.0.0.1:7005/jolokia/
@@ -10,6 +10,7 @@ Corda nodes
corda-configuration-file
clientrpc
shell
node-auth-config
node-database
node-administration
out-of-process-verification
@@ -23,7 +23,22 @@ into the ``cordapps`` folder.

Node naming
-----------
A node's name must be a valid X.500 name that obeys the following additional constraints:
A node's name must be a valid X.500 distinguished name. In order to be compatible with other implementations
(particularly TLS implementations), we constrain the allowed X.500 attribute types to a subset of the minimum supported
set for X.509 certificates (specified in RFC 3280), plus the locality attribute:

* Organization (O)
* State (ST)
* Locality (L)
* Country (C)
* Organizational-unit (OU)
* Common name (CN) (only used for service identities)

The name must also obey the following constraints:

* The organisation, locality and country attributes are present

* The state, organisational-unit and common name attributes are optional

* The fields of the name have the following maximum character lengths:

@@ -33,21 +48,22 @@ A node's name must be a valid X.500 name that obeys the following additional con
* Locality: 64
* State: 64

* The country code is a valid ISO 3166-1 two letter code in upper-case

* The organisation, locality and country attributes are present
* The country attribute is a valid ISO 3166-1 two letter code in upper-case

* The organisation field of the name obeys the following constraints:

* Upper-case first letter
* Has at least two letters
* No leading or trailing whitespace
* No double-spacing
* Upper-case first letter
* Does not contain the words "node" or "server"
* Does not include the characters ',' or '=' or '$' or '"' or '\'' or '\\'
* Does not include the following characters: ``,`` , ``=`` , ``$`` , ``"`` , ``'`` , ``\``
* Is in NFKC normalization form
* Only the latin, common and inherited unicode scripts are supported

* This is to avoid right-to-left issues, debugging issues when we can't pronounce names over the phone, and
character confusability attacks

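Putting those rules together, a legal node name can be built or parsed with CordaX500Name, which also appears in the configuration-parsing hunk further down this diff. A minimal Kotlin sketch with placeholder organisation details:

    import net.corda.core.identity.CordaX500Name

    // Organisation, locality and country are the mandatory attributes described above.
    val myLegalName = CordaX500Name(organisation = "Mega Corp", locality = "London", country = "GB")

    // The same name in string form, e.g. for the myLegalName entry in node.conf.
    val parsed = CordaX500Name.parse("O=Mega Corp, L=London, C=GB")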
The Cordform task
-----------------
Corda provides a gradle plugin called ``Cordform`` that allows you to automatically generate and configure a set of
@@ -41,14 +41,4 @@ Nodes can provide several types of services:
* One or more pluggable **notary services**. Notaries guarantee the uniqueness, and possibly the validity, of ledger
updates. Each notary service may be run on a single node, or across a cluster of nodes.
* Zero or more **oracle services**. An oracle is a well-known service that signs transactions if they state a fact and
that fact is considered to be true.

These components are illustrated in the following diagram:

.. image:: resources/cordaNetwork.png
:scale: 25%
:align: center

In this diagram, Corda infrastructure services are those upon which all participants depend, such as the network map
and notary services. Corda services may be deployed by participants, third parties or a central network operator
(such as R3). The diagram is not intended to imply that only a centralised model is supported.
that fact is considered to be true.
@@ -34,33 +34,6 @@ only shared with those who need to see them, and planned use of Intel SGX, it is
privacy breaches. Confidential identities are used to ensure that even if a third party gets access to an unencrypted
transaction, they cannot identify the participants without additional information.

Name
----

Identity names are X.500 distinguished names with Corda-specific constraints applied. In order to be compatible with
other implementations (particularly TLS implementations), we constrain the allowed X.500 attribute types to a subset of
the minimum supported set for X.509 certificates (specified in RFC 3280), plus the locality attribute:

* organization (O)
* state (ST)
* locality (L)
* country (C)
* organizational-unit (OU)
* common name (CN) - used only for service identities

The organisation, locality and country attributes are required, while state, organisational-unit and common name are
optional. Attributes cannot be be present more than once in the name.

All of these attributes have the following set of constraints applied for security reasons:

- No blacklisted words (currently "node" and "server").
- Restrict names to Latin scripts for now to avoid right-to-left issues, debugging issues when we can't pronounce names over the phone, and character confusability attacks.
- No commas or equals signs.
- No dollars or quote marks.

Additionally the "organisation" attribute must consist of at least three letters and starting with a capital letter,
and "country code" is strictly restricted to valid ISO 3166-1 two letter codes.

Certificates
------------

@@ -82,6 +55,4 @@ business sensitive details of transactions). In some cases nodes may also use pr
to the main network map service, for operational reasons. Identities registered with such network maps must be
considered well known, and it is never appropriate to store confidential identities in a central directory without
controls applied at the record level to ensure only those who require access to an identity can retrieve its
certificate.

.. TODO: Revisit once design & use cases of private maps is further fleshed out
certificate.
@@ -16,7 +16,6 @@ This section should be read in order:
key-concepts-identity
key-concepts-states
key-concepts-contracts
key-concepts-contract-constraints
key-concepts-transactions
key-concepts-flows
key-concepts-consensus
@@ -93,6 +93,8 @@ formats for accessing MBeans, and provides client libraries to work with that pr

Here are a few ways to build dashboards and extract monitoring data for a node:

* `hawtio <https://hawt.io>`_ is a web based console that connects directly to JVM's that have been instrumented with a
jolokia agent. This tool provides a nice JMX dashboard very similar to the traditional JVisualVM / JConsole MBeans original.
* `JMX2Graphite <https://github.com/logzio/jmx2graphite>`_ is a tool that can be pointed to /monitoring/json and will
scrape the statistics found there, then insert them into the Graphite monitoring tool on a regular basis. It runs
in Docker and can be started with a single command.
@@ -105,6 +107,29 @@ Here are a few ways to build dashboards and extract monitoring data for a node:
It can bridge any data input to any output using their plugin system, for example, Telegraf can
be configured to collect data from Jolokia and write to DataDog web api.

The Node configuration parameter `exportJMXTo` should be set to ``http`` to ensure a Jolokia agent is instrumented with
the JVM run-time.

The following JMX statistics are exported:

* Corda specific metrics: flow information (total started, finished, in-flight; flow duration by flow type), attachments (count)
* Apache Artemis metrics: queue information for P2P and RPC services
* JVM statistics: classloading, garbage collection, memory, runtime, threading, operating system
* Hibernate statistics (only when node is started-up in `devMode` due to expensive run-time costs)

When starting Corda nodes using Cordformation runner (see :doc:`running-a-node`), you should see a startup message similar to the following:
**Jolokia: Agent started with URL http://127.0.0.1:7005/jolokia/**

When starting Corda nodes using the `DriverDSL`, you should see a startup message in the logs similar to the following:
**Starting out-of-process Node USA Bank Corp, debug port is not enabled, jolokia monitoring port is 7005 {}**

Several Jolokia policy based security configuration files (``jolokia-access.xml``) are available for dev, test, and prod
environments under ``/config/<env>``.

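Once the agent is up on the default port shown above, the exported metrics can be pulled over plain HTTP/JSON via Jolokia's generic read operation. A minimal Kotlin sketch using only JDK classes; the MBean name queried here is the Hibernate statistics bean registered elsewhere in this commit, and other metric names depend on what the node actually exposes:

    import java.net.URL

    fun main() {
        // Jolokia read operation: GET /jolokia/read/<mbean name>
        val url = URL("http://127.0.0.1:7005/jolokia/read/org.hibernate:type=statistics")
        val json = url.openStream().bufferedReader().use { it.readText() }
        println(json)   // raw JSON payload; feed into Graphite/Telegraf/hawtio as described above
    }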
The following diagram illustrates Corda flow metrics visualized using `hawtio <https://hawt.io>`_ :

.. image:: resources/hawtio-jmx.png

Memory usage and tuning
-----------------------

136  docs/source/node-auth-config.rst  Normal file
@@ -0,0 +1,136 @@
Access security settings
========================

Access to node functionalities via SSH or RPC is protected by an authentication and authorisation policy.

The field ``security`` in ``node.conf`` exposes various sub-fields related to authentication/authorisation specifying:

* The data source providing credentials and permissions for users (e.g. a remote RDBMS)
* An optional password encryption method.
* Optional caching of user data on the node side.

.. warning:: Specifying both ``rpcUsers`` and ``security`` fields in ``node.conf`` is considered an illegal setting and
rejected by the node at startup since ``rpcUsers`` is effectively deprecated in favour of ``security.authService``.

**Example 1:** connect to remote RDBMS for credentials/permissions, with encrypted user passwords and
caching on node-side:

.. container:: codeset

.. sourcecode:: groovy

security = {
authService = {
dataSource = {
type = "DB",
passwordEncryption = "SHIRO_1_CRYPT",
connection = {
jdbcUrl = "<jdbc connection string>"
username = "<db username>"
password = "<db user password>"
driverClassName = "<JDBC driver>"
}
}
options = {
cache = {
expiryTimeSecs = 120
capacity = 10000
}
}
}
}

**Example 2:** list of user credentials and permissions hard-coded in ``node.conf``

.. container:: codeset

.. sourcecode:: groovy

security = {
authService = {
dataSource = {
type = "INMEMORY",
users =[
{
username = "user1"
password = "password"
permissions = [
"StartFlow.net.corda.flows.ExampleFlow1",
"StartFlow.net.corda.flows.ExampleFlow2",
...
]
},
...
]
}
}
}

Let us look in more detail at the structure of ``security.authService``:

Authentication/authorisation data
---------------------------------

The ``dataSource`` field defines the data provider supplying credentials and permissions for users. The ``type``
subfield identifies the type of data provider; the currently supported ones are:

* **INMEMORY:** a list of user credentials and permissions hard-coded in configuration in the ``users`` field
(see example 2 above)

* **DB:** An external RDBMS accessed via the JDBC connection described by ``connection``. The current implementation
expects the database to store data according to the following schema:

- Table ``users`` containing columns ``username`` and ``password``.
The ``username`` column *must have unique values*.
- Table ``user_roles`` containing columns ``username`` and ``role_name`` associating a user to a set of *roles*
- Table ``roles_permissions`` containing columns ``role_name`` and ``permission`` associating a role to a set of
permission strings

Note in particular how in the DB case permissions are assigned to *roles* rather than individual users.
Also, there is no prescription on the SQL type of the columns (although in our tests we defined ``username`` and
``role_name`` of SQL type ``VARCHAR`` and ``password`` of ``TEXT`` type) and it is allowed to put additional columns
besides the ones expected by the implementation.

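For illustration, the schema just described could be provisioned and populated as in the Kotlin sketch below. Column types and sizes are arbitrary (the text explicitly leaves them unprescribed), the in-memory H2 JDBC URL is only for the sketch and assumes an H2 driver on the classpath, and the role and user names are placeholders.

    import java.sql.DriverManager

    fun main() {
        DriverManager.getConnection("jdbc:h2:mem:security").use { connection ->
            connection.createStatement().use { statement ->
                statement.execute("CREATE TABLE users (username VARCHAR(64) PRIMARY KEY, password VARCHAR(256))")
                statement.execute("CREATE TABLE user_roles (username VARCHAR(64), role_name VARCHAR(64))")
                statement.execute("CREATE TABLE roles_permissions (role_name VARCHAR(64), permission VARCHAR(256))")
                // Permissions are attached to roles, and users are mapped onto those roles.
                statement.execute("INSERT INTO users VALUES ('user1', 'password')")
                statement.execute("INSERT INTO user_roles VALUES ('user1', 'default_role')")
                statement.execute("INSERT INTO roles_permissions VALUES ('default_role', 'StartFlow.net.corda.flows.ExampleFlow1')")
            }
        }
    }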
Password encryption
-------------------

Storing passwords in plain text is discouraged in production systems aiming for high security requirements. We support
reading passwords stored using the Apache Shiro fully reversible Modular Crypt Format, specified in the documentation
of ``org.apache.shiro.crypto.hash.format.Shiro1CryptFormat``.

Passwords are assumed to be in plain format by default. To specify an encryption format, use the field:

.. container:: codeset

.. sourcecode:: groovy

passwordEncryption = SHIRO_1_CRYPT

Hash-encrypted passwords based on the Shiro1CryptFormat can be produced with the `Apache Shiro Hasher tool <https://shiro.apache.org/command-line-hasher.html>`_

Cache
-----

Adding a cache layer on top of an external provider of user credentials and permissions can significantly improve
performance in some cases, with the disadvantage of introducing a latency in the propagation of changes to the data.

Caching of user data is disabled by default; it can be enabled by defining the ``options.cache`` field, as seen in
the examples above:

.. container:: codeset

.. sourcecode:: groovy

options = {
cache = {
expiryTimeSecs = 120
capacity = 10000
}
}

This will enable an in-memory cache with maximum capacity (number of entries) and maximum lifetime of entries given
respectively by the values set in the ``capacity`` and ``expiryTimeSecs`` fields.

Binary file not shown. (Before: 668 KiB, After: 670 KiB)
Binary file not shown. (Before: 100 KiB)
BIN  docs/source/resources/hawtio-jmx.png  Normal file
Binary file not shown. (After: 242 KiB)
@@ -23,6 +23,13 @@ class Utils {
project.configurations.single { it.name == "compile" }.extendsFrom(configuration)
}
}
fun createRuntimeConfiguration(name: String, project: Project) {
if(!project.configurations.any { it.name == name }) {
val configuration = project.configurations.create(name)
configuration.isTransitive = false
project.configurations.single { it.name == "runtime" }.extendsFrom(configuration)
}
}
}

}
@@ -10,6 +10,8 @@ import java.io.File
*/
class Cordformation : Plugin<Project> {
internal companion object {
const val CORDFORMATION_TYPE = "cordformationInternal"

/**
* Gets a resource file from this plugin's JAR file.
*
@@ -31,5 +33,8 @@ class Cordformation : Plugin<Project> {

override fun apply(project: Project) {
Utils.createCompileConfiguration("cordapp", project)
Utils.createRuntimeConfiguration(CORDFORMATION_TYPE, project)
val jolokiaVersion = project.rootProject.ext<String>("jolokia_version")
project.dependencies.add(CORDFORMATION_TYPE, "org.jolokia:jolokia-jvm:$jolokiaVersion:agent")
}
}
@@ -75,6 +75,7 @@ class Node(private val project: Project) : CordformNode() {
if (config.hasPath("webAddress")) {
installWebserverJar()
}
installAgentJar()
installBuiltCordapp()
installCordapps()
installConfig()
@@ -158,6 +159,29 @@ class Node(private val project: Project) : CordformNode() {
}
}

/**
* Installs the jolokia monitoring agent JAR to the node/drivers directory
*/
private fun installAgentJar() {
val jolokiaVersion = project.rootProject.ext<String>("jolokia_version")
val agentJar = project.configuration("runtime").files {
(it.group == "org.jolokia") &&
(it.name == "jolokia-jvm") &&
(it.version == jolokiaVersion)
// TODO: revisit when classifier attribute is added. eg && (it.classifier = "agent")
}.first() // should always be the jolokia agent fat jar: eg. jolokia-jvm-1.3.7-agent.jar
project.logger.info("Jolokia agent jar: $agentJar")
if (agentJar.isFile) {
val driversDir = File(nodeDir, "drivers")
project.copy {
it.apply {
from(agentJar)
into(driversDir)
}
}
}
}

/**
* Installs the configuration file to this node's directory and detokenises it.
*/
@@ -22,6 +22,11 @@ private object debugPortAlloc {
internal fun next() = basePort++
}

private object monitoringPortAlloc {
private var basePort = 7005
internal fun next() = basePort++
}

fun main(args: Array<String>) {
val startedProcesses = mutableListOf<Process>()
val headless = GraphicsEnvironment.isHeadless() || args.contains(HEADLESS_FLAG)
@@ -49,8 +54,9 @@ private abstract class JarType(private val jarName: String) {
return null
}
val debugPort = debugPortAlloc.next()
val monitoringPort = monitoringPortAlloc.next()
println("Starting $jarName in $dir on debug port $debugPort")
val process = (if (headless) ::HeadlessJavaCommand else ::TerminalWindowJavaCommand)(jarName, dir, debugPort, javaArgs, jvmArgs).start()
val process = (if (headless) ::HeadlessJavaCommand else ::TerminalWindowJavaCommand)(jarName, dir, debugPort, monitoringPort, javaArgs, jvmArgs).start()
if (os == OS.MACOS) Thread.sleep(1000)
return process
}
@@ -69,15 +75,23 @@ private abstract class JavaCommand(
jarName: String,
internal val dir: File,
debugPort: Int?,
monitoringPort: Int?,
internal val nodeName: String,
init: MutableList<String>.() -> Unit, args: List<String>,
jvmArgs: List<String>
) {
private val jolokiaJar by lazy {
File("$dir/drivers").listFiles { _, filename ->
filename.matches("jolokia-jvm-.*-agent\\.jar$".toRegex())
}.first().name
}

internal val command: List<String> = mutableListOf<String>().apply {
add(getJavaPath())
addAll(jvmArgs)
add("-Dname=$nodeName")
null != debugPort && add("-Dcapsule.jvm.args=-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=$debugPort")
null != monitoringPort && add("-Dcapsule.jvm.args=-javaagent:drivers/$jolokiaJar=port=$monitoringPort")
add("-jar")
add(jarName)
init()
@@ -89,14 +103,14 @@ private abstract class JavaCommand(
internal abstract fun getJavaPath(): String
}

private class HeadlessJavaCommand(jarName: String, dir: File, debugPort: Int?, args: List<String>, jvmArgs: List<String>)
: JavaCommand(jarName, dir, debugPort, dir.name, { add("--no-local-shell") }, args, jvmArgs) {
private class HeadlessJavaCommand(jarName: String, dir: File, debugPort: Int?, monitoringPort: Int?, args: List<String>, jvmArgs: List<String>)
: JavaCommand(jarName, dir, debugPort, monitoringPort, dir.name, { add("--no-local-shell") }, args, jvmArgs) {
override fun processBuilder() = ProcessBuilder(command).redirectError(File("error.$nodeName.log")).inheritIO()
override fun getJavaPath() = File(File(System.getProperty("java.home"), "bin"), "java").path
}

private class TerminalWindowJavaCommand(jarName: String, dir: File, debugPort: Int?, args: List<String>, jvmArgs: List<String>)
: JavaCommand(jarName, dir, debugPort, "${dir.name}-$jarName", {}, args, jvmArgs) {
private class TerminalWindowJavaCommand(jarName: String, dir: File, debugPort: Int?, monitoringPort: Int?, args: List<String>, jvmArgs: List<String>)
: JavaCommand(jarName, dir, debugPort, monitoringPort, "${dir.name}-$jarName", {}, args, jvmArgs) {
override fun processBuilder() = ProcessBuilder(when (os) {
OS.MACOS -> {
listOf("osascript", "-e", """tell app "Terminal"
@@ -80,6 +80,7 @@ private fun Config.getSingleValue(path: String, type: KType): Any? {
URL::class -> URL(getString(path))
CordaX500Name::class -> CordaX500Name.parse(getString(path))
Properties::class -> getConfig(path).toProperties()
Config::class -> getConfig(path)
else -> if (typeClass.java.isEnum) {
parseEnum(typeClass.java, getString(path))
} else {
@@ -20,7 +20,8 @@ const val NODE_DATABASE_PREFIX = "node_"
data class DatabaseConfig(
val initialiseSchema: Boolean = true,
val serverNameTablePrefix: String = "",
val transactionIsolationLevel: TransactionIsolationLevel = TransactionIsolationLevel.REPEATABLE_READ
val transactionIsolationLevel: TransactionIsolationLevel = TransactionIsolationLevel.REPEATABLE_READ,
val exportHibernateJMXStatistics: Boolean = false
)

// This class forms part of the node config and so any changes to it must be handled with care
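The new flag defaults to false, so existing configurations are unaffected; enabling it is just a matter of constructing (or deserialising) the config with the parameter set. A trivial sketch, e.g. for a test:

    // Enable the Hibernate JMX statistics added by this change, keeping the other defaults.
    val databaseConfig = DatabaseConfig(exportHibernateJMXStatistics = true)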
@@ -17,8 +17,10 @@ import org.hibernate.type.AbstractSingleColumnStandardBasicType
import org.hibernate.type.descriptor.java.PrimitiveByteArrayTypeDescriptor
import org.hibernate.type.descriptor.sql.BlobTypeDescriptor
import org.hibernate.type.descriptor.sql.VarbinaryTypeDescriptor
import java.lang.management.ManagementFactory
import java.sql.Connection
import java.util.concurrent.ConcurrentHashMap
import javax.management.ObjectName
import javax.persistence.AttributeConverter

class HibernateConfiguration(
@@ -60,9 +62,31 @@ class HibernateConfiguration(

val sessionFactory = buildSessionFactory(config, metadataSources, databaseConfig.serverNameTablePrefix)
logger.info("Created session factory for schemas: $schemas")

// export Hibernate JMX statistics
if (databaseConfig.exportHibernateJMXStatistics)
initStatistics(sessionFactory)

return sessionFactory
}

// NOTE: workaround suggested to overcome deprecation of StatisticsService (since Hibernate v4.0)
// https://stackoverflow.com/questions/23606092/hibernate-upgrade-statisticsservice
fun initStatistics(sessionFactory: SessionFactory) {
val statsName = ObjectName("org.hibernate:type=statistics")
val mbeanServer = ManagementFactory.getPlatformMBeanServer()

val statisticsMBean = DelegatingStatisticsService(sessionFactory.statistics)
statisticsMBean.isStatisticsEnabled = true

try {
mbeanServer.registerMBean(statisticsMBean, statsName)
}
catch (e: Exception) {
logger.warn(e.message)
}
}

private fun buildSessionFactory(config: Configuration, metadataSources: MetadataSources, tablePrefix: String): SessionFactory {
config.standardServiceRegistryBuilder.applySettings(config.properties)
val metadata = metadataSources.getMetadataBuilder(config.standardServiceRegistryBuilder.build()).run {
@@ -0,0 +1,227 @@
package net.corda.nodeapi.internal.persistence

import javax.management.MXBean

import org.hibernate.stat.Statistics
import org.hibernate.stat.SecondLevelCacheStatistics
import org.hibernate.stat.QueryStatistics
import org.hibernate.stat.NaturalIdCacheStatistics
import org.hibernate.stat.EntityStatistics
import org.hibernate.stat.CollectionStatistics

/**
* Exposes Hibernate [Statistics] contract as JMX resource.
*/
@MXBean
interface StatisticsService : Statistics

/**
* Implements the MXBean interface by delegating through the actual [Statistics] implementation retrieved from the
* session factory.
*/
class DelegatingStatisticsService(private val delegate: Statistics) : StatisticsService {

override fun clear() {
delegate.clear()
}

override fun getCloseStatementCount(): Long {
return delegate.closeStatementCount
}

override fun getCollectionFetchCount(): Long {
return delegate.collectionFetchCount
}

override fun getCollectionLoadCount(): Long {
return delegate.collectionLoadCount
}

override fun getCollectionRecreateCount(): Long {
return delegate.collectionRecreateCount
}

override fun getCollectionRemoveCount(): Long {
return delegate.collectionRemoveCount
}

override fun getCollectionRoleNames(): Array<String> {
return delegate.collectionRoleNames
}

override fun getCollectionStatistics(arg0: String): CollectionStatistics {
return delegate.getCollectionStatistics(arg0)
}

override fun getCollectionUpdateCount(): Long {
return delegate.collectionUpdateCount
}

override fun getConnectCount(): Long {
return delegate.connectCount
}

override fun getEntityDeleteCount(): Long {
return delegate.entityDeleteCount
}

override fun getEntityFetchCount(): Long {
return delegate.entityFetchCount
}

override fun getEntityInsertCount(): Long {
return delegate.entityInsertCount
}

override fun getEntityLoadCount(): Long {
return delegate.entityLoadCount
}

override fun getEntityNames(): Array<String> {
return delegate.entityNames
}

override fun getEntityStatistics(arg0: String): EntityStatistics {
return delegate.getEntityStatistics(arg0)
}

override fun getEntityUpdateCount(): Long {
return delegate.entityUpdateCount
}

override fun getFlushCount(): Long {
return delegate.flushCount
}

override fun getNaturalIdCacheHitCount(): Long {
return delegate.naturalIdCacheHitCount
}

override fun getNaturalIdCacheMissCount(): Long {
return delegate.naturalIdCacheMissCount
}

override fun getNaturalIdCachePutCount(): Long {
return delegate.naturalIdCachePutCount
}

override fun getNaturalIdCacheStatistics(arg0: String): NaturalIdCacheStatistics {
return delegate.getNaturalIdCacheStatistics(arg0)
}

override fun getNaturalIdQueryExecutionCount(): Long {
return delegate.naturalIdQueryExecutionCount
}

override fun getNaturalIdQueryExecutionMaxTime(): Long {
return delegate.naturalIdQueryExecutionMaxTime
}

override fun getNaturalIdQueryExecutionMaxTimeRegion(): String {
return delegate.naturalIdQueryExecutionMaxTimeRegion
}

override fun getOptimisticFailureCount(): Long {
return delegate.optimisticFailureCount
}

override fun getPrepareStatementCount(): Long {
return delegate.prepareStatementCount
}

override fun getQueries(): Array<String> {
return delegate.queries
}

override fun getQueryCacheHitCount(): Long {
return delegate.queryCacheHitCount
}

override fun getQueryCacheMissCount(): Long {
return delegate.queryCacheMissCount
}

override fun getQueryCachePutCount(): Long {
return delegate.queryCachePutCount
}

override fun getQueryExecutionCount(): Long {
return delegate.queryExecutionCount
}

override fun getQueryExecutionMaxTime(): Long {
return delegate.queryExecutionMaxTime
}

override fun getQueryExecutionMaxTimeQueryString(): String {
return delegate.queryExecutionMaxTimeQueryString
}

override fun getQueryStatistics(arg0: String): QueryStatistics {
return delegate.getQueryStatistics(arg0)
}

override fun getSecondLevelCacheHitCount(): Long {
return delegate.secondLevelCacheHitCount
}

override fun getSecondLevelCacheMissCount(): Long {
return delegate.secondLevelCacheMissCount
}

override fun getSecondLevelCachePutCount(): Long {
return delegate.secondLevelCachePutCount
}

override fun getSecondLevelCacheRegionNames(): Array<String> {
return delegate.secondLevelCacheRegionNames
}

override fun getSecondLevelCacheStatistics(arg0: String): SecondLevelCacheStatistics {
return delegate.getSecondLevelCacheStatistics(arg0)
}

override fun getSessionCloseCount(): Long {
return delegate.sessionCloseCount
}

override fun getSessionOpenCount(): Long {
return delegate.sessionOpenCount
}

override fun getStartTime(): Long {
return delegate.startTime
}

override fun getSuccessfulTransactionCount(): Long {
return delegate.successfulTransactionCount
}

override fun getTransactionCount(): Long {
return delegate.transactionCount
}

override fun getUpdateTimestampsCacheHitCount(): Long {
return delegate.updateTimestampsCacheHitCount
}

override fun getUpdateTimestampsCacheMissCount(): Long {
return delegate.updateTimestampsCacheMissCount
}

override fun getUpdateTimestampsCachePutCount(): Long {
return delegate.updateTimestampsCachePutCount
}

override fun isStatisticsEnabled(): Boolean {
return delegate.isStatisticsEnabled
}

override fun logSummary() {
delegate.logSummary()
}

override fun setStatisticsEnabled(arg0: Boolean) {
delegate.isStatisticsEnabled = arg0
}
}
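Once initStatistics (shown in the HibernateConfiguration hunk above) has registered this MXBean under org.hibernate:type=statistics, its attributes can be read back through standard JMX from inside the same JVM. A small Kotlin sketch; the attribute name follows the usual JMX convention of dropping the "get" prefix from the getter:

    import java.lang.management.ManagementFactory
    import javax.management.ObjectName

    fun main() {
        val server = ManagementFactory.getPlatformMBeanServer()
        val name = ObjectName("org.hibernate:type=statistics")   // same name used at registration time
        val entityLoads = server.getAttribute(name, "EntityLoadCount")
        println("Hibernate entity load count: $entityLoads")
    }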
@@ -27,9 +27,9 @@ import java.util.*
* transformation rules we create a mapping between those values and the values that exist on the
* current class
*
* @property clazz The enum as it exists now, not as it did when it was serialized (either in the past
* @property type The enum as it exists now, not as it did when it was serialized (either in the past
* or future).
* @property factory the [SerializerFactory] that is building this serialization object.
* @param factory the [SerializerFactory] that is building this serialization object.
* @property conversions A mapping between all potential enum constants that could've been assigned to
* an instance of the enum as it existed at time of serialisation and those that exist now
* @property ordinals Convenience mapping of constant to ordinality
@@ -57,7 +57,7 @@ class EnumEvolutionSerializer(
* received AMQP header
* @param new The Serializer object we built based on the current state of the enum class on our classpath
* @param factory the [SerializerFactory] that is building this serialization object.
* @param transformsFromBlob the transforms attached to the class in the AMQP header, i.e. the transforms
* @param schemas the transforms attached to the class in the AMQP header, i.e. the transforms
* known at serialization time
*/
fun make(old: RestrictedType,
@@ -32,17 +32,16 @@ sealed class PropertySerializer(val name: String, val readMethod: Method?, val r
return if (isInterface) listOf(SerializerFactory.nameForType(resolvedType)) else emptyList()
}

private fun generateDefault(): String? {
if (isJVMPrimitive) {
return when (resolvedType) {
java.lang.Boolean.TYPE -> "false"
java.lang.Character.TYPE -> "�"
else -> "0"
private fun generateDefault(): String? =
if (isJVMPrimitive) {
when (resolvedType) {
java.lang.Boolean.TYPE -> "false"
java.lang.Character.TYPE -> "�"
else -> "0"
}
} else {
null
}
} else {
return null
}
}

private fun generateMandatory(): Boolean {
return isJVMPrimitive || readMethod?.returnsNullable() == false
@@ -28,7 +28,7 @@ enum class TransformTypes(val build: (Annotation) -> Transform) : DescribedType
Unknown({ UnknownTransform() }) {
override fun getDescriptor(): Any = DESCRIPTOR
override fun getDescribed(): Any = ordinal
override fun validate(l : List<Transform>, constants: Map<String, Int>) { }
override fun validate(list: List<Transform>, constants: Map<String, Int>) {}
},
EnumDefault({ a -> EnumDefaultSchemaTransform((a as CordaSerializationTransformEnumDefault).old, a.new) }) {
override fun getDescriptor(): Any = DESCRIPTOR
@@ -37,13 +37,13 @@ enum class TransformTypes(val build: (Annotation) -> Transform) : DescribedType
/**
* Validates a list of constant additions to an enumerated type. To be valid a default (the value
* that should be used when we cannot use the new value) must refer to a constant that exists in the
* enum class as it exists now and it cannot refer to itself.
* enum class as it exists now and it cannot refer to itself.
*
* @param l The list of transforms representing new constants and the mapping from that constant to an
* @param list The list of transforms representing new constants and the mapping from that constant to an
* existing value
* @param constants The list of enum constants on the type the transforms are being applied to
*/
override fun validate(list : List<Transform>, constants: Map<String, Int>) {
override fun validate(list: List<Transform>, constants: Map<String, Int>) {
uncheckedCast<List<Transform>, List<EnumDefaultSchemaTransform>>(list).forEach {
if (!constants.contains(it.new)) {
throw NotSerializableException("Unknown enum constant ${it.new}")
@@ -62,7 +62,7 @@ enum class TransformTypes(val build: (Annotation) -> Transform) : DescribedType
if (constants[it.old]!! >= constants[it.new]!!) {
throw NotSerializableException(
"Enum extensions must default to older constants. ${it.new}[${constants[it.new]}] " +
"defaults to ${it.old}[${constants[it.old]}] which is greater")
"defaults to ${it.old}[${constants[it.old]}] which is greater")
}
}
}
@@ -76,15 +76,16 @@ enum class TransformTypes(val build: (Annotation) -> Transform) : DescribedType
* that is a constant is renamed to something that used to exist in the enum. We do this for both
* the same constant (i.e. C -> D -> C) and multiple constants (C->D, B->C)
*
* @param l The list of transforms representing the renamed constants and the mapping between their new
* @param list The list of transforms representing the renamed constants and the mapping between their new
* and old values
* @param constants The list of enum constants on the type the transforms are being applied to
*/
override fun validate(l : List<Transform>, constants: Map<String, Int>) {
override fun validate(list: List<Transform>, constants: Map<String, Int>) {
object : Any() {
val from : MutableSet<String> = mutableSetOf()
val to : MutableSet<String> = mutableSetOf() }.apply {
@Suppress("UNCHECKED_CAST") (l as List<RenameSchemaTransform>).forEach { rename ->
val from: MutableSet<String> = mutableSetOf()
val to: MutableSet<String> = mutableSetOf()
}.apply {
@Suppress("UNCHECKED_CAST") (list as List<RenameSchemaTransform>).forEach { rename ->
if (rename.to in this.to || rename.from in this.from) {
throw NotSerializableException("Cyclic renames are not allowed (${rename.to})")
}
@@ -104,7 +105,7 @@ enum class TransformTypes(val build: (Annotation) -> Transform) : DescribedType
//}
;

abstract fun validate(l: List<Transform>, constants: Map<String, Int>)
abstract fun validate(list: List<Transform>, constants: Map<String, Int>)

companion object : DescribedTypeConstructor<TransformTypes> {
val DESCRIPTOR = AMQPDescriptorRegistry.TRANSFORM_ELEMENT_KEY.amqpDescriptor
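For context, the transforms validated above originate from annotations placed on the evolved enum itself. A rough sketch of what such an enum might look like is shown below; the parameter names old and new match the CordaSerializationTransformEnumDefault usage visible in this file's EnumDefault branch, but the wrapper annotation and overall shape should be treated as an assumption rather than an exact reproduction of the API.

    // Hypothetical evolved enum: constant D was added later and must default to an older constant
    // so that peers deserialising with the old class definition can still map the value.
    @CordaSerializationTransformEnumDefaults(
            CordaSerializationTransformEnumDefault(new = "D", old = "C")
    )
    enum class ExampleState { A, B, C, D }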
@@ -92,7 +92,7 @@ class UnknownTestTransform(val a: Int, val b: Int, val c: Int) : Transform() {
companion object : DescribedTypeConstructor<UnknownTestTransform> {
val typeName = "UnknownTest"

override fun newInstance(obj: Any?) : UnknownTestTransform {
override fun newInstance(obj: Any?): UnknownTestTransform {
val described = obj as List<*>
return UnknownTestTransform(described[1] as Int, described[2] as Int, described[3] as Int)
}
@ -201,41 +201,41 @@ data class TransformsSchema(val types: Map<String, EnumMap<TransformTypes, Mutab
|
||||
* class loader and this dictates which classes we can and cannot see
|
||||
*/
|
||||
fun get(name: String, sf: SerializerFactory) = sf.transformsCache.computeIfAbsent(name) {
|
||||
val transforms = EnumMap<TransformTypes, MutableList<Transform>>(TransformTypes::class.java)
|
||||
try {
|
||||
val clazz = sf.classloader.loadClass(name)
|
||||
val transforms = EnumMap<TransformTypes, MutableList<Transform>>(TransformTypes::class.java)
|
||||
try {
|
||||
val clazz = sf.classloader.loadClass(name)
|
||||
|
||||
supportedTransforms.forEach { transform ->
|
||||
clazz.getAnnotation(transform.type)?.let { list ->
|
||||
transform.getAnnotations(list).forEach { annotation ->
|
||||
val t = transform.enum.build(annotation)
|
||||
supportedTransforms.forEach { transform ->
|
||||
clazz.getAnnotation(transform.type)?.let { list ->
|
||||
transform.getAnnotations(list).forEach { annotation ->
|
||||
val t = transform.enum.build(annotation)
|
||||
|
||||
// we're explicitly rejecting repeated annotations, whilst it's fine and we'd just
|
||||
// ignore them it feels like a good thing to alert the user to since this is
|
||||
// more than likely a typo in their code so best make it an actual error
|
||||
if (transforms.computeIfAbsent(transform.enum) { mutableListOf() }
|
||||
.filter { t == it }
|
||||
.isNotEmpty()) {
|
||||
throw NotSerializableException(
|
||||
"Repeated unique transformation annotation of type ${t.name}")
|
||||
}
|
||||
|
||||
transforms[transform.enum]!!.add(t)
|
||||
// we're explicitly rejecting repeated annotations, whilst it's fine and we'd just
|
||||
// ignore them it feels like a good thing to alert the user to since this is
|
||||
// more than likely a typo in their code so best make it an actual error
|
||||
if (transforms.computeIfAbsent(transform.enum) { mutableListOf() }
|
||||
.filter { t == it }
|
||||
.isNotEmpty()) {
|
||||
throw NotSerializableException(
|
||||
"Repeated unique transformation annotation of type ${t.name}")
|
||||
}
|
||||
|
||||
transform.enum.validate(
|
||||
transforms[transform.enum] ?: emptyList(),
|
||||
clazz.enumConstants.mapIndexed { i, s -> Pair(s.toString(), i) }.toMap())
|
||||
transforms[transform.enum]!!.add(t)
|
||||
}
|
||||
}
|
||||
} catch (_: ClassNotFoundException) {
|
||||
// if we can't load the class we'll end up caching an empty list which is fine as that
|
||||
// list, on lookup, won't be included in the schema because it's empty
|
||||
}
|
||||
|
||||
transforms
|
||||
transform.enum.validate(
|
||||
transforms[transform.enum] ?: emptyList(),
|
||||
clazz.enumConstants.mapIndexed { i, s -> Pair(s.toString(), i) }.toMap())
|
||||
}
|
||||
}
|
||||
} catch (_: ClassNotFoundException) {
|
||||
// if we can't load the class we'll end up caching an empty list which is fine as that
|
||||
// list, on lookup, won't be included in the schema because it's empty
|
||||
}
|
||||
|
||||
transforms
|
||||
}
|
||||
|
||||
private fun getAndAdd(
|
||||
type: String,
|
||||
sf: SerializerFactory,
|
||||
|
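For orientation, a sketch of how the cached lookup above is typically consumed; the call shape below is an assumption (with `factory` standing for a SerializerFactory instance), not something shown in this diff:

    // Returns the per-transform-type lists recorded for the named class; the result is empty
    // when the class carries no transform annotations or cannot be loaded by sf.classloader.
    val transforms = TransformsSchema.get(AnnotatedEnumOnce::class.java.name, factory)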
@ -6,7 +6,6 @@ import org.assertj.core.api.Assertions
|
||||
import org.junit.Test
|
||||
import java.io.File
|
||||
import java.io.NotSerializableException
|
||||
import java.net.URI
|
||||
import kotlin.test.assertEquals
|
||||
import kotlin.test.assertTrue
|
||||
|
||||
@ -47,7 +46,7 @@ class EnumEvolvabilityTests {
|
||||
|
||||
@Test
|
||||
fun noAnnotation() {
|
||||
data class C (val n: NotAnnotated)
|
||||
data class C(val n: NotAnnotated)
|
||||
|
||||
val sf = testDefaultFactory()
|
||||
val bAndS = TestSerializationOutput(VERBOSE, sf).serializeAndReturnSchema(C(NotAnnotated.A))
|
||||
@ -63,7 +62,7 @@ class EnumEvolvabilityTests {
|
||||
|
||||
@Test
|
||||
fun missingDefaults() {
|
||||
data class C (val m: MissingDefaults)
|
||||
data class C(val m: MissingDefaults)
|
||||
|
||||
val sf = testDefaultFactory()
|
||||
val bAndS = TestSerializationOutput(VERBOSE, sf).serializeAndReturnSchema(C(MissingDefaults.A))
|
||||
@ -74,7 +73,7 @@ class EnumEvolvabilityTests {
|
||||
|
||||
@Test
|
||||
fun missingRenames() {
|
||||
data class C (val m: MissingRenames)
|
||||
data class C(val m: MissingRenames)
|
||||
|
||||
val sf = testDefaultFactory()
|
||||
val bAndS = TestSerializationOutput(VERBOSE, sf).serializeAndReturnSchema(C(MissingRenames.A))
|
||||
@ -86,7 +85,7 @@ class EnumEvolvabilityTests {
|
||||
|
||||
@Test
|
||||
fun defaultAnnotationIsAddedToEnvelope() {
|
||||
data class C (val annotatedEnum: AnnotatedEnumOnce)
|
||||
data class C(val annotatedEnum: AnnotatedEnumOnce)
|
||||
|
||||
val sf = testDefaultFactory()
|
||||
val bAndS = TestSerializationOutput(VERBOSE, sf).serializeAndReturnSchema(C(AnnotatedEnumOnce.D))
|
||||
@ -94,45 +93,45 @@ class EnumEvolvabilityTests {
|
||||
// only the enum is decorated so schema sizes should be different (2 objects, only one evolved)
|
||||
assertEquals(2, bAndS.schema.types.size)
|
||||
assertEquals(1, bAndS.transformsSchema.types.size)
|
||||
assertEquals (AnnotatedEnumOnce::class.java.name, bAndS.transformsSchema.types.keys.first())
|
||||
assertEquals(AnnotatedEnumOnce::class.java.name, bAndS.transformsSchema.types.keys.first())
|
||||
|
||||
val schema = bAndS.transformsSchema.types.values.first()
|
||||
|
||||
assertEquals(1, schema.size)
|
||||
assertTrue (schema.keys.contains(TransformTypes.EnumDefault))
|
||||
assertEquals (1, schema[TransformTypes.EnumDefault]!!.size)
|
||||
assertTrue (schema[TransformTypes.EnumDefault]!![0] is EnumDefaultSchemaTransform)
|
||||
assertEquals ("D", (schema[TransformTypes.EnumDefault]!![0] as EnumDefaultSchemaTransform).new)
|
||||
assertEquals ("A", (schema[TransformTypes.EnumDefault]!![0] as EnumDefaultSchemaTransform).old)
|
||||
assertTrue(schema.keys.contains(TransformTypes.EnumDefault))
|
||||
assertEquals(1, schema[TransformTypes.EnumDefault]!!.size)
|
||||
assertTrue(schema[TransformTypes.EnumDefault]!![0] is EnumDefaultSchemaTransform)
|
||||
assertEquals("D", (schema[TransformTypes.EnumDefault]!![0] as EnumDefaultSchemaTransform).new)
|
||||
assertEquals("A", (schema[TransformTypes.EnumDefault]!![0] as EnumDefaultSchemaTransform).old)
|
||||
}
|
||||
|
||||
@Test
|
||||
fun doubleDefaultAnnotationIsAddedToEnvelope() {
|
||||
data class C (val annotatedEnum: AnnotatedEnumTwice)
|
||||
data class C(val annotatedEnum: AnnotatedEnumTwice)
|
||||
|
||||
val sf = testDefaultFactory()
|
||||
val bAndS = TestSerializationOutput(VERBOSE, sf).serializeAndReturnSchema(C(AnnotatedEnumTwice.E))
|
||||
|
||||
assertEquals(2, bAndS.schema.types.size)
|
||||
assertEquals(1, bAndS.transformsSchema.types.size)
|
||||
assertEquals (AnnotatedEnumTwice::class.java.name, bAndS.transformsSchema.types.keys.first())
|
||||
assertEquals(AnnotatedEnumTwice::class.java.name, bAndS.transformsSchema.types.keys.first())
|
||||
|
||||
val schema = bAndS.transformsSchema.types.values.first()
|
||||
|
||||
assertEquals(1, schema.size)
|
||||
assertTrue (schema.keys.contains(TransformTypes.EnumDefault))
|
||||
assertEquals (2, schema[TransformTypes.EnumDefault]!!.size)
|
||||
assertTrue (schema[TransformTypes.EnumDefault]!![0] is EnumDefaultSchemaTransform)
|
||||
assertEquals ("E", (schema[TransformTypes.EnumDefault]!![0] as EnumDefaultSchemaTransform).new)
|
||||
assertEquals ("D", (schema[TransformTypes.EnumDefault]!![0] as EnumDefaultSchemaTransform).old)
|
||||
assertTrue (schema[TransformTypes.EnumDefault]!![1] is EnumDefaultSchemaTransform)
|
||||
assertEquals ("D", (schema[TransformTypes.EnumDefault]!![1] as EnumDefaultSchemaTransform).new)
|
||||
assertEquals ("A", (schema[TransformTypes.EnumDefault]!![1] as EnumDefaultSchemaTransform).old)
|
||||
assertTrue(schema.keys.contains(TransformTypes.EnumDefault))
|
||||
assertEquals(2, schema[TransformTypes.EnumDefault]!!.size)
|
||||
assertTrue(schema[TransformTypes.EnumDefault]!![0] is EnumDefaultSchemaTransform)
|
||||
assertEquals("E", (schema[TransformTypes.EnumDefault]!![0] as EnumDefaultSchemaTransform).new)
|
||||
assertEquals("D", (schema[TransformTypes.EnumDefault]!![0] as EnumDefaultSchemaTransform).old)
|
||||
assertTrue(schema[TransformTypes.EnumDefault]!![1] is EnumDefaultSchemaTransform)
|
||||
assertEquals("D", (schema[TransformTypes.EnumDefault]!![1] as EnumDefaultSchemaTransform).new)
|
||||
assertEquals("A", (schema[TransformTypes.EnumDefault]!![1] as EnumDefaultSchemaTransform).old)
|
||||
}
|
||||
|
||||
@Test
|
||||
fun defaultAnnotationIsAddedToEnvelopeAndDeserialised() {
|
||||
data class C (val annotatedEnum: AnnotatedEnumOnce)
|
||||
data class C(val annotatedEnum: AnnotatedEnumOnce)
|
||||
|
||||
val sf = testDefaultFactory()
|
||||
val sb = TestSerializationOutput(VERBOSE, sf).serialize(C(AnnotatedEnumOnce.D))
|
||||
@ -152,11 +151,11 @@ class EnumEvolvabilityTests {
|
||||
|
||||
val schema = transforms[eName]
|
||||
|
||||
assertTrue (schema!!.keys.contains(TransformTypes.EnumDefault))
|
||||
assertEquals (1, schema[TransformTypes.EnumDefault]!!.size)
|
||||
assertTrue (schema[TransformTypes.EnumDefault]!![0] is EnumDefaultSchemaTransform)
|
||||
assertEquals ("D", (schema[TransformTypes.EnumDefault]!![0] as EnumDefaultSchemaTransform).new)
|
||||
assertEquals ("A", (schema[TransformTypes.EnumDefault]!![0] as EnumDefaultSchemaTransform).old)
|
||||
assertTrue(schema!!.keys.contains(TransformTypes.EnumDefault))
|
||||
assertEquals(1, schema[TransformTypes.EnumDefault]!!.size)
|
||||
assertTrue(schema[TransformTypes.EnumDefault]!![0] is EnumDefaultSchemaTransform)
|
||||
assertEquals("D", (schema[TransformTypes.EnumDefault]!![0] as EnumDefaultSchemaTransform).new)
|
||||
assertEquals("A", (schema[TransformTypes.EnumDefault]!![0] as EnumDefaultSchemaTransform).old)
|
||||
}
|
||||
|
||||
@Test
|
||||
@ -174,9 +173,9 @@ class EnumEvolvabilityTests {
|
||||
|
||||
val transforms = db.envelope.transformsSchema.types
|
||||
|
||||
assertTrue (transforms.contains(AnnotatedEnumTwice::class.java.name))
|
||||
assertTrue (transforms[AnnotatedEnumTwice::class.java.name]!!.contains(TransformTypes.EnumDefault))
|
||||
assertEquals (2, transforms[AnnotatedEnumTwice::class.java.name]!![TransformTypes.EnumDefault]!!.size)
|
||||
assertTrue(transforms.contains(AnnotatedEnumTwice::class.java.name))
|
||||
assertTrue(transforms[AnnotatedEnumTwice::class.java.name]!!.contains(TransformTypes.EnumDefault))
|
||||
assertEquals(2, transforms[AnnotatedEnumTwice::class.java.name]!![TransformTypes.EnumDefault]!!.size)
|
||||
|
||||
val enumDefaults = transforms[AnnotatedEnumTwice::class.java.name]!![TransformTypes.EnumDefault]!!
|
||||
|
||||
@ -188,7 +187,7 @@ class EnumEvolvabilityTests {
|
||||
|
||||
@Test
|
||||
fun renameAnnotationIsAdded() {
|
||||
data class C (val annotatedEnum: RenameEnumOnce)
|
||||
data class C(val annotatedEnum: RenameEnumOnce)
|
||||
|
||||
val sf = testDefaultFactory()
|
||||
|
||||
@ -197,7 +196,7 @@ class EnumEvolvabilityTests {
|
||||
|
||||
assertEquals(2, bAndS.schema.types.size)
|
||||
assertEquals(1, bAndS.transformsSchema.types.size)
|
||||
assertEquals (RenameEnumOnce::class.java.name, bAndS.transformsSchema.types.keys.first())
|
||||
assertEquals(RenameEnumOnce::class.java.name, bAndS.transformsSchema.types.keys.first())
|
||||
|
||||
val serialisedSchema = bAndS.transformsSchema.types[RenameEnumOnce::class.java.name]!!
|
||||
|
||||
@ -212,7 +211,7 @@ class EnumEvolvabilityTests {
|
||||
|
||||
assertEquals(2, cAndS.envelope.schema.types.size)
|
||||
assertEquals(1, cAndS.envelope.transformsSchema.types.size)
|
||||
assertEquals (RenameEnumOnce::class.java.name, cAndS.envelope.transformsSchema.types.keys.first())
|
||||
assertEquals(RenameEnumOnce::class.java.name, cAndS.envelope.transformsSchema.types.keys.first())
|
||||
|
||||
val deserialisedSchema = cAndS.envelope.transformsSchema.types[RenameEnumOnce::class.java.name]!!
|
||||
|
||||
@ -232,7 +231,7 @@ class EnumEvolvabilityTests {
|
||||
|
||||
@Test
|
||||
fun doubleRenameAnnotationIsAdded() {
|
||||
data class C (val annotatedEnum: RenameEnumTwice)
|
||||
data class C(val annotatedEnum: RenameEnumTwice)
|
||||
|
||||
val sf = testDefaultFactory()
|
||||
|
||||
@ -241,7 +240,7 @@ class EnumEvolvabilityTests {
|
||||
|
||||
assertEquals(2, bAndS.schema.types.size)
|
||||
assertEquals(1, bAndS.transformsSchema.types.size)
|
||||
assertEquals (RenameEnumTwice::class.java.name, bAndS.transformsSchema.types.keys.first())
|
||||
assertEquals(RenameEnumTwice::class.java.name, bAndS.transformsSchema.types.keys.first())
|
||||
|
||||
val serialisedSchema = bAndS.transformsSchema.types[RenameEnumTwice::class.java.name]!!
|
||||
|
||||
@ -258,7 +257,7 @@ class EnumEvolvabilityTests {
|
||||
|
||||
assertEquals(2, cAndS.envelope.schema.types.size)
|
||||
assertEquals(1, cAndS.envelope.transformsSchema.types.size)
|
||||
assertEquals (RenameEnumTwice::class.java.name, cAndS.envelope.transformsSchema.types.keys.first())
|
||||
assertEquals(RenameEnumTwice::class.java.name, cAndS.envelope.transformsSchema.types.keys.first())
|
||||
|
||||
val deserialisedSchema = cAndS.envelope.transformsSchema.types[RenameEnumTwice::class.java.name]!!
|
||||
|
||||
@ -271,15 +270,15 @@ class EnumEvolvabilityTests {
|
||||
assertEquals("F", (deserialisedSchema[TransformTypes.Rename]!![1] as RenameSchemaTransform).to)
|
||||
}
|
||||
|
||||
@CordaSerializationTransformRename(from="A", to="X")
|
||||
@CordaSerializationTransformEnumDefault(old = "X", new="E")
|
||||
@CordaSerializationTransformRename(from = "A", to = "X")
|
||||
@CordaSerializationTransformEnumDefault(old = "X", new = "E")
|
||||
enum class RenameAndExtendEnum {
|
||||
X, B, C, D, E
|
||||
}
|
||||
|
||||
@Test
|
||||
fun bothAnnotationTypes() {
|
||||
data class C (val annotatedEnum: RenameAndExtendEnum)
|
||||
data class C(val annotatedEnum: RenameAndExtendEnum)
|
||||
|
||||
val sf = testDefaultFactory()
|
||||
|
||||
@ -288,15 +287,15 @@ class EnumEvolvabilityTests {
|
||||
|
||||
assertEquals(2, bAndS.schema.types.size)
|
||||
assertEquals(1, bAndS.transformsSchema.types.size)
|
||||
assertEquals (RenameAndExtendEnum::class.java.name, bAndS.transformsSchema.types.keys.first())
|
||||
assertEquals(RenameAndExtendEnum::class.java.name, bAndS.transformsSchema.types.keys.first())
|
||||
|
||||
val serialisedSchema = bAndS.transformsSchema.types[RenameAndExtendEnum::class.java.name]!!
|
||||
|
||||
// This time there should be two distinct transform types (all previous tests have had only
|
||||
// a single type
|
||||
assertEquals(2, serialisedSchema.size)
|
||||
assertTrue (serialisedSchema.containsKey(TransformTypes.Rename))
|
||||
assertTrue (serialisedSchema.containsKey(TransformTypes.EnumDefault))
|
||||
assertTrue(serialisedSchema.containsKey(TransformTypes.Rename))
|
||||
assertTrue(serialisedSchema.containsKey(TransformTypes.EnumDefault))
|
||||
|
||||
assertEquals(1, serialisedSchema[TransformTypes.Rename]!!.size)
|
||||
assertEquals("A", (serialisedSchema[TransformTypes.Rename]!![0] as RenameSchemaTransform).from)
|
||||
@ -307,7 +306,7 @@ class EnumEvolvabilityTests {
|
||||
assertEquals("X", (serialisedSchema[TransformTypes.EnumDefault]!![0] as EnumDefaultSchemaTransform).old)
|
||||
}
|
||||
|
||||
@CordaSerializationTransformEnumDefaults (
|
||||
@CordaSerializationTransformEnumDefaults(
|
||||
CordaSerializationTransformEnumDefault("D", "A"),
|
||||
CordaSerializationTransformEnumDefault("D", "A"))
|
||||
enum class RepeatedAnnotation {
|
||||
@ -316,7 +315,7 @@ class EnumEvolvabilityTests {
|
||||
|
||||
@Test
|
||||
fun repeatedAnnotation() {
|
||||
data class C (val a: RepeatedAnnotation)
|
||||
data class C(val a: RepeatedAnnotation)
|
||||
|
||||
val sf = testDefaultFactory()
|
||||
|
||||
@ -330,40 +329,40 @@ class EnumEvolvabilityTests {
|
||||
A, B, C, D
|
||||
}
|
||||
|
||||
@CordaSerializationTransformEnumDefaults (
|
||||
@CordaSerializationTransformEnumDefaults(
|
||||
CordaSerializationTransformEnumDefault("D", "A"),
|
||||
CordaSerializationTransformEnumDefault("E", "A"))
|
||||
enum class E2 {
|
||||
A, B, C, D, E
|
||||
}
|
||||
|
||||
@CordaSerializationTransformEnumDefaults (CordaSerializationTransformEnumDefault("D", "A"))
|
||||
@CordaSerializationTransformEnumDefaults(CordaSerializationTransformEnumDefault("D", "A"))
|
||||
enum class E3 {
|
||||
A, B, C, D
|
||||
}
|
||||
|
||||
@Test
|
||||
fun multiEnums() {
|
||||
data class A (val a: E1, val b: E2)
|
||||
data class B (val a: E3, val b: A, val c: E1)
|
||||
data class C (val a: B, val b: E2, val c: E3)
|
||||
data class A(val a: E1, val b: E2)
|
||||
data class B(val a: E3, val b: A, val c: E1)
|
||||
data class C(val a: B, val b: E2, val c: E3)
|
||||
|
||||
val c = C(B(E3.A,A(E1.A,E2.B),E1.C),E2.B,E3.A)
|
||||
val c = C(B(E3.A, A(E1.A, E2.B), E1.C), E2.B, E3.A)
|
||||
|
||||
val sf = testDefaultFactory()
|
||||
|
||||
// Serialise the object
|
||||
val bAndS = TestSerializationOutput(VERBOSE, sf).serializeAndReturnSchema(c)
|
||||
|
||||
println (bAndS.transformsSchema)
|
||||
println(bAndS.transformsSchema)
|
||||
|
||||
// we have six types and three of those, the enums, should have transforms
|
||||
assertEquals(6, bAndS.schema.types.size)
|
||||
assertEquals(3, bAndS.transformsSchema.types.size)
|
||||
|
||||
assertTrue (E1::class.java.name in bAndS.transformsSchema.types)
|
||||
assertTrue (E2::class.java.name in bAndS.transformsSchema.types)
|
||||
assertTrue (E3::class.java.name in bAndS.transformsSchema.types)
|
||||
assertTrue(E1::class.java.name in bAndS.transformsSchema.types)
|
||||
assertTrue(E2::class.java.name in bAndS.transformsSchema.types)
|
||||
assertTrue(E3::class.java.name in bAndS.transformsSchema.types)
|
||||
|
||||
val e1S = bAndS.transformsSchema.types[E1::class.java.name]!!
|
||||
val e2S = bAndS.transformsSchema.types[E2::class.java.name]!!
|
||||
@ -404,7 +403,7 @@ class EnumEvolvabilityTests {
|
||||
assertTrue(sf.transformsCache.containsKey(C2::class.java.name))
|
||||
assertTrue(sf.transformsCache.containsKey(AnnotatedEnumOnce::class.java.name))
|
||||
|
||||
assertEquals (sb1.transformsSchema.types[AnnotatedEnumOnce::class.java.name],
|
||||
assertEquals(sb1.transformsSchema.types[AnnotatedEnumOnce::class.java.name],
|
||||
sb2.transformsSchema.types[AnnotatedEnumOnce::class.java.name])
|
||||
}
|
||||
|
||||
@ -447,7 +446,7 @@ class EnumEvolvabilityTests {
|
||||
//
|
||||
// And we're not at 3. However, we ban this rename
|
||||
//
|
||||
@CordaSerializationTransformRenames (
|
||||
@CordaSerializationTransformRenames(
|
||||
CordaSerializationTransformRename("D", "C"),
|
||||
CordaSerializationTransformRename("C", "D")
|
||||
)
|
||||
@ -455,7 +454,7 @@ class EnumEvolvabilityTests {
|
||||
|
||||
@Test
|
||||
fun rejectCyclicRename() {
|
||||
data class C (val e: RejectCyclicRename)
|
||||
data class C(val e: RejectCyclicRename)
|
||||
|
||||
val sf = testDefaultFactory()
|
||||
Assertions.assertThatThrownBy {
|
||||
@ -468,7 +467,7 @@ class EnumEvolvabilityTests {
|
||||
// unserializable. However, in this case, it isn't a struct cycle; rather, one element
|
||||
// is renamed to match what a different element used to be called
|
||||
//
|
||||
@CordaSerializationTransformRenames (
|
||||
@CordaSerializationTransformRenames(
|
||||
CordaSerializationTransformRename(from = "B", to = "C"),
|
||||
CordaSerializationTransformRename(from = "C", to = "D")
|
||||
)
|
||||
@ -476,7 +475,7 @@ class EnumEvolvabilityTests {
|
||||
|
||||
@Test
|
||||
fun rejectCyclicRenameAlt() {
|
||||
data class C (val e: RejectCyclicRenameAlt)
|
||||
data class C(val e: RejectCyclicRenameAlt)
|
||||
|
||||
val sf = testDefaultFactory()
|
||||
Assertions.assertThatThrownBy {
|
||||
@ -484,7 +483,7 @@ class EnumEvolvabilityTests {
|
||||
}.isInstanceOf(NotSerializableException::class.java)
|
||||
}
|
||||
|
||||
@CordaSerializationTransformRenames (
|
||||
@CordaSerializationTransformRenames(
|
||||
CordaSerializationTransformRename("G", "C"),
|
||||
CordaSerializationTransformRename("F", "G"),
|
||||
CordaSerializationTransformRename("E", "F"),
|
||||
@ -495,7 +494,7 @@ class EnumEvolvabilityTests {
|
||||
|
||||
@Test
|
||||
fun rejectCyclicRenameRedux() {
|
||||
data class C (val e: RejectCyclicRenameRedux)
|
||||
data class C(val e: RejectCyclicRenameRedux)
|
||||
|
||||
val sf = testDefaultFactory()
|
||||
Assertions.assertThatThrownBy {
|
||||
@ -503,12 +502,12 @@ class EnumEvolvabilityTests {
|
||||
}.isInstanceOf(NotSerializableException::class.java)
|
||||
}
|
||||
|
||||
@CordaSerializationTransformEnumDefault (new = "D", old = "X")
|
||||
@CordaSerializationTransformEnumDefault(new = "D", old = "X")
|
||||
enum class RejectBadDefault { A, B, C, D }
|
||||
|
||||
@Test
|
||||
fun rejectBadDefault() {
|
||||
data class C (val e: RejectBadDefault)
|
||||
data class C(val e: RejectBadDefault)
|
||||
|
||||
val sf = testDefaultFactory()
|
||||
Assertions.assertThatThrownBy {
|
||||
@ -516,12 +515,12 @@ class EnumEvolvabilityTests {
|
||||
}.isInstanceOf(NotSerializableException::class.java)
|
||||
}
|
||||
|
||||
@CordaSerializationTransformEnumDefault (new = "D", old = "D")
|
||||
@CordaSerializationTransformEnumDefault(new = "D", old = "D")
|
||||
enum class RejectBadDefaultToSelf { A, B, C, D }
|
||||
|
||||
@Test
|
||||
fun rejectBadDefaultToSelf() {
|
||||
data class C (val e: RejectBadDefaultToSelf)
|
||||
data class C(val e: RejectBadDefaultToSelf)
|
||||
|
||||
val sf = testDefaultFactory()
|
||||
Assertions.assertThatThrownBy {
|
||||
|
@ -1,12 +1,13 @@
|
||||
package net.corda.nodeapi.internal.serialization.amqp
|
||||
|
||||
import net.corda.core.serialization.*
|
||||
import net.corda.core.serialization.CordaSerializationTransformEnumDefault
|
||||
import net.corda.core.serialization.CordaSerializationTransformEnumDefaults
|
||||
import net.corda.core.serialization.SerializedBytes
|
||||
import net.corda.testing.common.internal.ProjectStructure.projectRootDir
|
||||
import org.assertj.core.api.Assertions
|
||||
import org.junit.Test
|
||||
import java.io.File
|
||||
import java.io.NotSerializableException
|
||||
import java.net.URI
|
||||
import kotlin.test.assertEquals
|
||||
|
||||
// NOTE: To recreate the test files used by these tests uncomment the original test classes and comment
|
||||
@ -30,7 +31,7 @@ class EnumEvolveTests {
|
||||
val resource = "${javaClass.simpleName}.${testName()}"
|
||||
val sf = testDefaultFactory()
|
||||
|
||||
data class C (val e : DeserializeNewerSetToUnknown)
|
||||
data class C(val e: DeserializeNewerSetToUnknown)
|
||||
|
||||
// Uncomment to re-generate test files
|
||||
// File(URI("$localPath/$resource")).writeBytes(
|
||||
@ -40,7 +41,7 @@ class EnumEvolveTests {
|
||||
|
||||
val obj = DeserializationInput(sf).deserialize(SerializedBytes<C>(File(path.toURI()).readBytes()))
|
||||
|
||||
assertEquals (DeserializeNewerSetToUnknown.C, obj.e)
|
||||
assertEquals(DeserializeNewerSetToUnknown.C, obj.e)
|
||||
}
|
||||
|
||||
// Version of the class as it was serialised
|
||||
@ -78,9 +79,9 @@ class EnumEvolveTests {
|
||||
// of the evolution code
|
||||
val obj3 = DeserializationInput(sf).deserialize(SerializedBytes<C>(File(path3.toURI()).readBytes()))
|
||||
|
||||
assertEquals (DeserializeNewerSetToUnknown2.C, obj1.e)
|
||||
assertEquals (DeserializeNewerSetToUnknown2.C, obj2.e)
|
||||
assertEquals (DeserializeNewerSetToUnknown2.C, obj3.e)
|
||||
assertEquals(DeserializeNewerSetToUnknown2.C, obj1.e)
|
||||
assertEquals(DeserializeNewerSetToUnknown2.C, obj2.e)
|
||||
assertEquals(DeserializeNewerSetToUnknown2.C, obj3.e)
|
||||
}
|
||||
|
||||
|
||||
@ -149,7 +150,7 @@ class EnumEvolveTests {
|
||||
data class C(val e: DeserializeWithRename)
|
||||
|
||||
// Uncomment to re-generate test files, needs to be done in three stages
|
||||
val so = SerializationOutput(sf)
|
||||
// val so = SerializationOutput(sf)
|
||||
// First change
|
||||
// File(URI("$localPath/$resource.1.AA")).writeBytes(so.serialize(C(DeserializeWithRename.AA)).bytes)
|
||||
// File(URI("$localPath/$resource.1.B")).writeBytes(so.serialize(C(DeserializeWithRename.B)).bytes)
|
||||
@ -271,7 +272,7 @@ class EnumEvolveTests {
|
||||
data class C(val e: MultiOperations)
|
||||
|
||||
// Uncomment to re-generate test files, needs to be done in three stages
|
||||
val so = SerializationOutput(sf)
|
||||
// val so = SerializationOutput(sf)
|
||||
// First change
|
||||
// File(URI("$localPath/$resource.1.A")).writeBytes(so.serialize(C(MultiOperations.A)).bytes)
|
||||
// File(URI("$localPath/$resource.1.B")).writeBytes(so.serialize(C(MultiOperations.B)).bytes)
|
||||
@ -345,15 +346,15 @@ class EnumEvolveTests {
|
||||
Pair("$resource.5.G", MultiOperations.C))
|
||||
|
||||
fun load(l: List<Pair<String, MultiOperations>>) = l.map {
|
||||
Pair (DeserializationInput(sf).deserialize(SerializedBytes<C>(
|
||||
File(EvolvabilityTests::class.java.getResource(it.first).toURI()).readBytes())), it.second)
|
||||
Pair(DeserializationInput(sf).deserialize(SerializedBytes<C>(
|
||||
File(EvolvabilityTests::class.java.getResource(it.first).toURI()).readBytes())), it.second)
|
||||
}
|
||||
|
||||
load (stage1Resources).forEach { assertEquals(it.second, it.first.e) }
|
||||
load (stage2Resources).forEach { assertEquals(it.second, it.first.e) }
|
||||
load (stage3Resources).forEach { assertEquals(it.second, it.first.e) }
|
||||
load (stage4Resources).forEach { assertEquals(it.second, it.first.e) }
|
||||
load (stage5Resources).forEach { assertEquals(it.second, it.first.e) }
|
||||
load(stage1Resources).forEach { assertEquals(it.second, it.first.e) }
|
||||
load(stage2Resources).forEach { assertEquals(it.second, it.first.e) }
|
||||
load(stage3Resources).forEach { assertEquals(it.second, it.first.e) }
|
||||
load(stage4Resources).forEach { assertEquals(it.second, it.first.e) }
|
||||
load(stage5Resources).forEach { assertEquals(it.second, it.first.e) }
|
||||
}
|
||||
|
||||
@CordaSerializationTransformEnumDefault(old = "A", new = "F")
|
||||
@ -363,7 +364,7 @@ class EnumEvolveTests {
|
||||
fun badNewValue() {
|
||||
val sf = testDefaultFactory()
|
||||
|
||||
data class C (val e : BadNewValue)
|
||||
data class C(val e: BadNewValue)
|
||||
|
||||
Assertions.assertThatThrownBy {
|
||||
SerializationOutput(sf).serialize(C(BadNewValue.A))
|
||||
@ -374,13 +375,13 @@ class EnumEvolveTests {
|
||||
CordaSerializationTransformEnumDefault(new = "D", old = "E"),
|
||||
CordaSerializationTransformEnumDefault(new = "E", old = "A")
|
||||
)
|
||||
enum class OutOfOrder { A, B, C, D, E}
|
||||
enum class OutOfOrder { A, B, C, D, E }
|
||||
|
||||
@Test
|
||||
fun outOfOrder() {
|
||||
val sf = testDefaultFactory()
|
||||
|
||||
data class C (val e : OutOfOrder)
|
||||
data class C(val e: OutOfOrder)
|
||||
|
||||
Assertions.assertThatThrownBy {
|
||||
SerializationOutput(sf).serialize(C(OutOfOrder.A))
|
||||
|
@ -2,17 +2,14 @@ package net.corda.nodeapi.internal.serialization.amqp
|
||||
|
||||
import net.corda.core.serialization.ClassWhitelist
|
||||
import net.corda.core.serialization.CordaSerializable
|
||||
import org.junit.Test
|
||||
import java.time.DayOfWeek
|
||||
|
||||
import kotlin.test.assertEquals
|
||||
import kotlin.test.assertNotNull
|
||||
|
||||
import java.io.File
|
||||
import java.io.NotSerializableException
|
||||
|
||||
import net.corda.core.serialization.SerializedBytes
|
||||
import org.assertj.core.api.Assertions
|
||||
import org.junit.Test
|
||||
import java.io.File
|
||||
import java.io.NotSerializableException
|
||||
import java.time.DayOfWeek
|
||||
import kotlin.test.assertEquals
|
||||
import kotlin.test.assertNotNull
|
||||
|
||||
class EnumTests {
|
||||
enum class Bras {
|
||||
@ -42,8 +39,8 @@ class EnumTests {
|
||||
//}
|
||||
|
||||
// the new state, note in the test we serialised with value UNDERWIRE so the spacer
|
||||
// occuring after this won't have changed the ordinality of our serialised value
|
||||
// and thus should still be deserialisable
|
||||
// occurring after this won't have changed the ordinality of our serialised value
|
||||
// and thus should still be deserializable
|
||||
enum class OldBras2 {
|
||||
TSHIRT, UNDERWIRE, PUSHUP, SPACER, BRALETTE, SPACER2
|
||||
}
|
||||
|
@ -6,7 +6,6 @@ import net.corda.testing.common.internal.ProjectStructure.projectRootDir
|
||||
import org.junit.Test
|
||||
import java.io.File
|
||||
import java.io.NotSerializableException
|
||||
import java.net.URI
|
||||
import kotlin.test.assertEquals
|
||||
|
||||
// To regenerate any of the binary test files do the following
|
||||
@ -19,13 +18,14 @@ import kotlin.test.assertEquals
|
||||
// 5. Comment back out the generation code and uncomment the actual test
|
||||
class EvolvabilityTests {
|
||||
// When regenerating the test files this needs to be set to the file system location of the resource files
|
||||
@Suppress("UNUSED")
|
||||
var localPath = projectRootDir.toUri().resolve(
|
||||
"node-api/src/test/resources/net/corda/nodeapi/internal/serialization/amqp")
|
||||
|
||||
@Test
|
||||
fun simpleOrderSwapSameType() {
|
||||
val sf = testDefaultFactory()
|
||||
val resource= "EvolvabilityTests.simpleOrderSwapSameType"
|
||||
val resource = "EvolvabilityTests.simpleOrderSwapSameType"
|
||||
|
||||
val A = 1
|
||||
val B = 2
|
||||
@ -89,7 +89,7 @@ class EvolvabilityTests {
|
||||
|
||||
assertEquals(A, deserializedC.a)
|
||||
assertEquals(null, deserializedC.b)
|
||||
}
|
||||
}
|
||||
|
||||
@Test(expected = NotSerializableException::class)
|
||||
fun addAdditionalParam() {
|
||||
@ -369,6 +369,7 @@ class EvolvabilityTests {
|
||||
|
||||
// Add a parameter to inner but keep outer unchanged
|
||||
data class Inner(val a: Int, val b: String?)
|
||||
|
||||
data class Outer(val a: Int, val b: Inner)
|
||||
|
||||
val path = EvolvabilityTests::class.java.getResource(resource)
|
||||
|
@ -43,6 +43,11 @@ sourceSets {
|
||||
// This prevents problems in IntelliJ with regard to duplicate source roots.
|
||||
processResources {
|
||||
from file("$rootDir/config/dev/log4j2.xml")
|
||||
from file("$rootDir/config/dev/jolokia-access.xml")
|
||||
}
|
||||
|
||||
processTestResources {
|
||||
from file("$rootDir/config/test/jolokia-access.xml")
|
||||
}
|
||||
|
||||
// To find potential version conflicts, run "gradle htmlDependencyReport" and then look in
|
||||
@ -157,6 +162,9 @@ dependencies {
|
||||
// FastClasspathScanner: classpath scanning
|
||||
compile 'io.github.lukehutch:fast-classpath-scanner:2.0.21'
|
||||
|
||||
// Apache Shiro: authentication, authorization and session management.
|
||||
compile "org.apache.shiro:shiro-core:${shiro_version}"
|
||||
|
||||
// Integration test helpers
|
||||
integrationTestCompile "junit:junit:$junit_version"
|
||||
integrationTestCompile "org.assertj:assertj-core:${assertj_version}"
|
||||
@ -174,6 +182,9 @@ dependencies {
|
||||
testCompile "org.glassfish.jersey.core:jersey-server:${jersey_version}"
|
||||
testCompile "org.glassfish.jersey.containers:jersey-container-servlet-core:${jersey_version}"
|
||||
testCompile "org.glassfish.jersey.containers:jersey-container-jetty-http:${jersey_version}"
|
||||
|
||||
// Jolokia JVM monitoring agent
|
||||
runtime "org.jolokia:jolokia-jvm:${jolokia_version}:agent"
|
||||
}
|
||||
|
||||
task integrationTest(type: Test) {
|
||||
|
@ -42,7 +42,7 @@ task buildCordaJAR(type: FatCapsule, dependsOn: project(':node').compileJava) {
|
||||
|
||||
capsuleManifest {
|
||||
applicationVersion = corda_release_version
|
||||
appClassPath = ["jolokia-agent-war-${project.rootProject.ext.jolokia_version}.war"]
|
||||
appClassPath = ["jolokia-war-${project.rootProject.ext.jolokia_version}.war"]
|
||||
// See experimental/quasar-hook/README.md for how to generate.
|
||||
def quasarExcludeExpression = "x(antlr**;bftsmart**;ch**;co.paralleluniverse**;com.codahale**;com.esotericsoftware**;com.fasterxml**;com.google**;com.ibm**;com.intellij**;com.jcabi**;com.nhaarman**;com.opengamma**;com.typesafe**;com.zaxxer**;de.javakaffee**;groovy**;groovyjarjarantlr**;groovyjarjarasm**;io.atomix**;io.github**;io.netty**;jdk**;joptsimple**;junit**;kotlin**;net.bytebuddy**;net.i2p**;org.apache**;org.assertj**;org.bouncycastle**;org.codehaus**;org.crsh**;org.dom4j**;org.fusesource**;org.h2**;org.hamcrest**;org.hibernate**;org.jboss**;org.jcp**;org.joda**;org.junit**;org.mockito**;org.objectweb**;org.objenesis**;org.slf4j**;org.w3c**;org.xml**;org.yaml**;reflectasm**;rx**)"
|
||||
javaAgents = ["quasar-core-${quasar_version}-jdk8.jar=${quasarExcludeExpression}"]
|
||||
|
@ -20,6 +20,7 @@ import kotlin.test.assertTrue
|
||||
import kotlin.test.fail
|
||||
import org.assertj.core.api.Assertions.assertThat
|
||||
import java.util.regex.Pattern
|
||||
import kotlin.reflect.jvm.jvmName
|
||||
|
||||
class SSHServerTest {
|
||||
|
||||
@ -113,7 +114,7 @@ class SSHServerTest {
|
||||
channel.disconnect()
|
||||
session.disconnect()
|
||||
|
||||
assertThat(response).matches("(?s)User not permissioned with any of \\[[^]]*${flowNameEscaped}.*")
|
||||
assertThat(response).matches("(?s)User not authorized to perform RPC call .*")
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -0,0 +1,303 @@
|
||||
package net.corda.node.services
|
||||
|
||||
import co.paralleluniverse.fibers.Suspendable
|
||||
import net.corda.client.rpc.CordaRPCClient
|
||||
import net.corda.client.rpc.PermissionException
|
||||
import net.corda.core.flows.FlowLogic
|
||||
import net.corda.core.flows.InitiatingFlow
|
||||
import net.corda.core.flows.StartableByRPC
|
||||
import net.corda.core.messaging.CordaRPCOps
|
||||
import net.corda.core.messaging.startFlow
|
||||
import net.corda.finance.flows.CashIssueFlow
|
||||
import net.corda.node.internal.Node
|
||||
import net.corda.node.internal.StartedNode
|
||||
import net.corda.node.services.config.PasswordEncryption
|
||||
import net.corda.node.services.config.SecurityConfiguration
|
||||
import net.corda.node.services.config.AuthDataSourceType
|
||||
import net.corda.nodeapi.internal.config.User
|
||||
import net.corda.nodeapi.internal.config.toConfig
|
||||
import net.corda.testing.internal.NodeBasedTest
|
||||
import net.corda.testing.*
|
||||
import org.apache.activemq.artemis.api.core.ActiveMQSecurityException
|
||||
import org.junit.After
|
||||
import org.junit.Before
|
||||
import org.junit.Test
|
||||
import java.sql.DriverManager
|
||||
import java.sql.Statement
|
||||
import java.util.*
|
||||
import kotlin.test.assertFailsWith
|
||||
|
||||
abstract class UserAuthServiceTest : NodeBasedTest() {
|
||||
|
||||
protected lateinit var node: StartedNode<Node>
|
||||
protected lateinit var client: CordaRPCClient
|
||||
|
||||
@Test
|
||||
fun `login with correct credentials`() {
|
||||
client.start("user", "foo")
|
||||
}
|
||||
|
||||
@Test
|
||||
fun `login with wrong credentials`() {
|
||||
client.start("user", "foo")
|
||||
assertFailsWith(
|
||||
ActiveMQSecurityException::class,
|
||||
"Login with incorrect password should fail") {
|
||||
client.start("user", "bar")
|
||||
}
|
||||
assertFailsWith(
|
||||
ActiveMQSecurityException::class,
|
||||
"Login with unknown username should fail") {
|
||||
client.start("X", "foo")
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
fun `check flow permissions are respected`() {
|
||||
client.start("user", "foo").use {
|
||||
val proxy = it.proxy
|
||||
proxy.startFlowDynamic(DummyFlow::class.java)
|
||||
proxy.startTrackedFlowDynamic(DummyFlow::class.java)
|
||||
proxy.startFlow(::DummyFlow)
|
||||
assertFailsWith(
|
||||
PermissionException::class,
|
||||
"This user should not be authorized to start flow `CashIssueFlow`") {
|
||||
proxy.startFlowDynamic(CashIssueFlow::class.java)
|
||||
}
|
||||
assertFailsWith(
|
||||
PermissionException::class,
|
||||
"This user should not be authorized to start flow `CashIssueFlow`") {
|
||||
proxy.startTrackedFlowDynamic(CashIssueFlow::class.java)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
fun `check permissions on RPC calls are respected`() {
|
||||
client.start("user", "foo").use {
|
||||
val proxy = it.proxy
|
||||
proxy.stateMachinesFeed()
|
||||
assertFailsWith(
|
||||
PermissionException::class,
|
||||
"This user should not be authorized to call 'nodeInfo'") {
|
||||
proxy.nodeInfo()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@StartableByRPC
|
||||
@InitiatingFlow
|
||||
class DummyFlow : FlowLogic<Unit>() {
|
||||
@Suspendable
|
||||
override fun call() = Unit
|
||||
}
|
||||
}
|
||||
|
||||
class UserAuthServiceEmbedded : UserAuthServiceTest() {
|
||||
|
||||
private val rpcUser = User("user", "foo", permissions = setOf(
|
||||
Permissions.startFlow<DummyFlow>(),
|
||||
Permissions.invokeRpc("vaultQueryBy"),
|
||||
Permissions.invokeRpc(CordaRPCOps::stateMachinesFeed),
|
||||
Permissions.invokeRpc("vaultQueryByCriteria")))
|
||||
|
||||
@Before
|
||||
fun setup() {
|
||||
val securityConfig = SecurityConfiguration(
|
||||
authService = SecurityConfiguration.AuthService.fromUsers(listOf(rpcUser)))
|
||||
|
||||
val configOverrides = mapOf("security" to securityConfig.toConfig().root().unwrapped())
|
||||
node = startNode(ALICE_NAME, rpcUsers = emptyList(), configOverrides = configOverrides)
|
||||
client = CordaRPCClient(node.internals.configuration.rpcAddress!!)
|
||||
}
|
||||
}
|
||||
|
||||
class UserAuthServiceTestsJDBC : UserAuthServiceTest() {
|
||||
|
||||
private val db = UsersDB(
|
||||
name = "SecurityDataSourceTestDB",
|
||||
users = listOf(UserAndRoles(username = "user",
|
||||
password = "foo",
|
||||
roles = listOf("default"))),
|
||||
roleAndPermissions = listOf(
|
||||
RoleAndPermissions(
|
||||
role = "default",
|
||||
permissions = listOf(
|
||||
Permissions.startFlow<DummyFlow>(),
|
||||
Permissions.invokeRpc("vaultQueryBy"),
|
||||
Permissions.invokeRpc(CordaRPCOps::stateMachinesFeed),
|
||||
Permissions.invokeRpc("vaultQueryByCriteria"))),
|
||||
RoleAndPermissions(
|
||||
role = "admin",
|
||||
permissions = listOf("ALL")
|
||||
)))
|
||||
|
||||
@Before
|
||||
fun setup() {
|
||||
val securityConfig = SecurityConfiguration(
|
||||
authService = SecurityConfiguration.AuthService(
|
||||
dataSource = SecurityConfiguration.AuthService.DataSource(
|
||||
type = AuthDataSourceType.DB,
|
||||
passwordEncryption = PasswordEncryption.NONE,
|
||||
connection = Properties().apply {
|
||||
setProperty("jdbcUrl", db.jdbcUrl)
|
||||
setProperty("username", "")
|
||||
setProperty("password", "")
|
||||
setProperty("driverClassName", "org.h2.Driver")
|
||||
}
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
val configOverrides = mapOf("security" to securityConfig.toConfig().root().unwrapped())
|
||||
node = startNode(ALICE_NAME, rpcUsers = emptyList(), configOverrides = configOverrides)
|
||||
client = CordaRPCClient(node.internals.configuration.rpcAddress!!)
|
||||
}
|
||||
|
||||
@Test
|
||||
fun `Add new users on-the-fly`() {
|
||||
assertFailsWith(
|
||||
ActiveMQSecurityException::class,
|
||||
"Login with incorrect password should fail") {
|
||||
client.start("user2", "bar")
|
||||
}
|
||||
|
||||
db.insert(UserAndRoles(
|
||||
username = "user2",
|
||||
password = "bar",
|
||||
roles = listOf("default")))
|
||||
|
||||
client.start("user2", "bar")
|
||||
}
|
||||
|
||||
@Test
|
||||
fun `Modify user permissions during RPC session`() {
|
||||
db.insert(UserAndRoles(
|
||||
username = "user3",
|
||||
password = "bar",
|
||||
roles = emptyList()))
|
||||
|
||||
|
||||
client.start("user3", "bar").use {
|
||||
val proxy = it.proxy
|
||||
assertFailsWith(
|
||||
PermissionException::class,
|
||||
"This user should not be authorized to call 'nodeInfo'") {
|
||||
proxy.stateMachinesFeed()
|
||||
}
|
||||
db.addRoleToUser("user3", "default")
|
||||
proxy.stateMachinesFeed()
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
fun `Revoke user permissions during RPC session`() {
|
||||
db.insert(UserAndRoles(
|
||||
username = "user4",
|
||||
password = "test",
|
||||
roles = listOf("default")))
|
||||
|
||||
client.start("user4", "test").use {
|
||||
val proxy = it.proxy
|
||||
proxy.stateMachinesFeed()
|
||||
db.deleteUser("user4")
|
||||
assertFailsWith(
|
||||
PermissionException::class,
|
||||
"This user should not be authorized to call 'nodeInfo'") {
|
||||
proxy.stateMachinesFeed()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@After
|
||||
fun tearDown() {
|
||||
db.close()
|
||||
}
|
||||
}
|
||||
|
||||
private data class UserAndRoles(val username: String, val password: String, val roles: List<String>)
|
||||
private data class RoleAndPermissions(val role: String, val permissions: List<String>)
|
||||
|
||||
private class UsersDB : AutoCloseable {
|
||||
|
||||
val jdbcUrl: String
|
||||
|
||||
companion object {
|
||||
val DB_CREATE_SCHEMA = """
|
||||
CREATE TABLE users (username VARCHAR(256), password TEXT);
|
||||
CREATE TABLE user_roles (username VARCHAR(256), role_name VARCHAR(256));
|
||||
CREATE TABLE roles_permissions (role_name VARCHAR(256), permission TEXT);
|
||||
"""
|
||||
}
|
||||
|
||||
fun insert(user: UserAndRoles) {
|
||||
session {
|
||||
it.execute("INSERT INTO users VALUES ('${user.username}', '${user.password}')")
|
||||
for (role in user.roles) {
|
||||
it.execute("INSERT INTO user_roles VALUES ('${user.username}', '${role}')")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fun insert(roleAndPermissions: RoleAndPermissions) {
|
||||
val (role, permissions) = roleAndPermissions
|
||||
session {
|
||||
for (permission in permissions) {
|
||||
it.execute("INSERT INTO roles_permissions VALUES ('$role', '$permission')")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fun addRoleToUser(username: String, role: String) {
|
||||
session {
|
||||
it.execute("INSERT INTO user_roles VALUES ('$username', '$role')")
|
||||
}
|
||||
}
|
||||
|
||||
fun deleteRole(role: String) {
|
||||
session {
|
||||
it.execute("DELETE FROM role_permissions WHERE role_name = '$role'")
|
||||
}
|
||||
}
|
||||
|
||||
fun deleteUser(username: String) {
|
||||
session {
|
||||
it.execute("DELETE FROM users WHERE username = '$username'")
|
||||
it.execute("DELETE FROM user_roles WHERE username = '$username'")
|
||||
}
|
||||
}
|
||||
|
||||
inline private fun session(statement: (Statement) -> Unit) {
|
||||
DriverManager.getConnection(jdbcUrl).use {
|
||||
it.autoCommit = false
|
||||
it.createStatement().use(statement)
|
||||
it.commit()
|
||||
}
|
||||
}
|
||||
|
||||
constructor(name: String,
|
||||
users: List<UserAndRoles> = emptyList(),
|
||||
roleAndPermissions: List<RoleAndPermissions> = emptyList()) {
|
||||
|
||||
jdbcUrl = "jdbc:h2:mem:${name};DB_CLOSE_DELAY=-1"
|
||||
|
||||
session {
|
||||
it.execute(DB_CREATE_SCHEMA)
|
||||
}
|
||||
|
||||
require(users.map { it.username }.toSet().size == users.size) {
|
||||
"Duplicate username in input"
|
||||
}
|
||||
|
||||
users.forEach { insert(it) }
|
||||
roleAndPermissions.forEach { insert(it) }
|
||||
}
|
||||
|
||||
override fun close() {
|
||||
DriverManager.getConnection(jdbcUrl).use {
|
||||
it.createStatement().use {
|
||||
it.execute("DROP ALL OBJECTS")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
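For reference, the schema used by UsersDB above lines up with the lookups Shiro's JdbcRealm performs by default (shown approximately, for orientation only; consult the Shiro documentation for the authoritative queries):

    //   SELECT password   FROM users             WHERE username  = ?
    //   SELECT role_name  FROM user_roles        WHERE username  = ?
    //   SELECT permission FROM roles_permissions WHERE role_name = ?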
@ -35,7 +35,7 @@ import net.corda.node.internal.cordapp.CordappProviderInternal
|
||||
import net.corda.node.services.ContractUpgradeHandler
|
||||
import net.corda.node.services.FinalityHandler
|
||||
import net.corda.node.services.NotaryChangeHandler
|
||||
import net.corda.node.services.RPCUserService
|
||||
import net.corda.node.internal.security.RPCSecurityManager
|
||||
import net.corda.node.services.api.*
|
||||
import net.corda.node.services.config.BFTSMaRtConfiguration
|
||||
import net.corda.node.services.config.NodeConfiguration
|
||||
@ -141,7 +141,7 @@ abstract class AbstractNode(val configuration: NodeConfiguration,
|
||||
}
|
||||
}
|
||||
|
||||
lateinit var userService: RPCUserService get
|
||||
lateinit var securityManager: RPCSecurityManager get
|
||||
|
||||
/** Completes once the node has successfully registered with the network map service
|
||||
* or has loaded network map data from local database */
|
||||
@ -272,7 +272,7 @@ abstract class AbstractNode(val configuration: NodeConfiguration,
|
||||
protected abstract fun getRxIoScheduler(): Scheduler
|
||||
|
||||
open fun startShell(rpcOps: CordaRPCOps) {
|
||||
InteractiveShell.startShell(configuration, rpcOps, userService, _services.identityService, _services.database)
|
||||
InteractiveShell.startShell(configuration, rpcOps, securityManager, _services.identityService, _services.database)
|
||||
}
|
||||
|
||||
private fun initNodeInfo(networkMapCache: NetworkMapCacheBaseInternal,
|
||||
|
@ -2,6 +2,7 @@ package net.corda.node.internal
|
||||
|
||||
import com.codahale.metrics.JmxReporter
|
||||
import net.corda.core.concurrent.CordaFuture
|
||||
import net.corda.core.context.AuthServiceId
|
||||
import net.corda.core.internal.concurrent.openFuture
|
||||
import net.corda.core.internal.concurrent.thenMatch
|
||||
import net.corda.core.internal.uncheckedCast
|
||||
@ -16,11 +17,10 @@ import net.corda.core.utilities.NetworkHostAndPort
|
||||
import net.corda.core.utilities.contextLogger
|
||||
import net.corda.node.VersionInfo
|
||||
import net.corda.node.internal.cordapp.CordappLoader
|
||||
import net.corda.node.internal.security.RPCSecurityManagerImpl
|
||||
import net.corda.node.serialization.KryoServerSerializationScheme
|
||||
import net.corda.node.services.RPCUserServiceImpl
|
||||
import net.corda.node.services.api.SchemaService
|
||||
import net.corda.node.services.config.NodeConfiguration
|
||||
import net.corda.node.services.config.VerifierType
|
||||
import net.corda.node.services.config.*
|
||||
import net.corda.node.services.messaging.*
|
||||
import net.corda.node.services.transactions.InMemoryTransactionVerifierService
|
||||
import net.corda.node.utilities.AddressUtils
|
||||
@ -133,7 +133,12 @@ open class Node(configuration: NodeConfiguration,
|
||||
private var shutdownHook: ShutdownHook? = null
|
||||
|
||||
override fun makeMessagingService(database: CordaPersistence, info: NodeInfo): MessagingService {
|
||||
userService = RPCUserServiceImpl(configuration.rpcUsers)
|
||||
// Construct security manager reading users data either from the 'security' config section
|
||||
// if present or from rpcUsers list if the former is missing from config.
|
||||
val securityManagerConfig = configuration.security?.authService ?:
|
||||
SecurityConfiguration.AuthService.fromUsers(configuration.rpcUsers)
|
||||
|
||||
securityManager = RPCSecurityManagerImpl(securityManagerConfig)
|
||||
|
||||
val serverAddress = configuration.messagingServerAddress ?: makeLocalMessageBroker()
|
||||
val advertisedAddress = info.addresses.single()
|
||||
@ -156,7 +161,7 @@ open class Node(configuration: NodeConfiguration,
|
||||
|
||||
private fun makeLocalMessageBroker(): NetworkHostAndPort {
|
||||
with(configuration) {
|
||||
messageBroker = ArtemisMessagingServer(this, p2pAddress.port, rpcAddress?.port, services.networkMapCache, userService)
|
||||
messageBroker = ArtemisMessagingServer(this, p2pAddress.port, rpcAddress?.port, services.networkMapCache, securityManager)
|
||||
return NetworkHostAndPort("localhost", p2pAddress.port)
|
||||
}
|
||||
}
|
||||
@ -214,7 +219,7 @@ open class Node(configuration: NodeConfiguration,
|
||||
// Start up the MQ clients.
|
||||
rpcMessagingClient.run {
|
||||
runOnStop += this::stop
|
||||
start(rpcOps, userService)
|
||||
start(rpcOps, securityManager)
|
||||
}
|
||||
verifierMessagingClient?.run {
|
||||
runOnStop += this::stop
|
||||
@ -227,10 +232,10 @@ open class Node(configuration: NodeConfiguration,
|
||||
}
|
||||
|
||||
/**
|
||||
* If the node is persisting to an embedded H2 database, then expose this via TCP with a JDBC URL of the form:
|
||||
* If the node is persisting to an embedded H2 database, then expose this via TCP with a DB URL of the form:
|
||||
* jdbc:h2:tcp://<host>:<port>/node
|
||||
* with username and password as per the DataSource connection details. The key element to enabling this support is to
|
||||
* ensure that you specify a JDBC connection URL of the form jdbc:h2:file: in the node config and that you include
|
||||
* ensure that you specify a DB connection URL of the form jdbc:h2:file: in the node config and that you include
|
||||
* the H2 option AUTO_SERVER_PORT set to the port you desire to use (0 will give a dynamically allocated port number)
|
||||
* but exclude the H2 option AUTO_SERVER=TRUE.
|
||||
* This is not using the H2 "automatic mixed mode" directly but leans on many of the underpinnings. For more details
|
||||
|
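A concrete illustration of the comment above (hypothetical paths and port, not taken from this change set):

    // node configuration side:  dataSource.url = "jdbc:h2:file:/opt/corda/persistence;AUTO_SERVER_PORT=31337"
    // remote client side:       jdbc:h2:tcp://localhost:31337/node   (credentials as per the dataSource config)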
@ -81,13 +81,13 @@ open class NodeStartup(val args: Array<String>) {
|
||||
conf0
|
||||
}
|
||||
|
||||
banJavaSerialisation(conf)
|
||||
preNetworkRegistration(conf)
|
||||
if (shouldRegisterWithNetwork(cmdlineOptions, conf)) {
|
||||
banJavaSerialisation(conf)
|
||||
preNetworkRegistration(conf)
|
||||
if (shouldRegisterWithNetwork(cmdlineOptions, conf)) {
|
||||
registerWithNetwork(cmdlineOptions, conf)
|
||||
return true
|
||||
}
|
||||
logStartupInfo(versionInfo, cmdlineOptions, conf)
|
||||
logStartupInfo(versionInfo, cmdlineOptions, conf)
|
||||
|
||||
try {
|
||||
cmdlineOptions.baseDirectory.createDirectories()
|
||||
|
@ -1,5 +1,6 @@
|
||||
package net.corda.node.internal
|
||||
|
||||
import net.corda.client.rpc.PermissionException
|
||||
import net.corda.core.contracts.ContractState
|
||||
import net.corda.core.crypto.SecureHash
|
||||
import net.corda.core.flows.FlowLogic
|
||||
@ -156,9 +157,12 @@ class RpcAuthorisationProxy(private val implementation: CordaRPCOps, private val
|
||||
private inline fun <RESULT> guard(methodName: String, action: () -> RESULT) = guard(methodName, emptyList(), action)
|
||||
|
||||
// TODO change to KFunction reference after Kotlin fixes https://youtrack.jetbrains.com/issue/KT-12140
|
||||
private inline fun <RESULT> guard(methodName: String, args: List<Any?>, action: () -> RESULT): RESULT {
|
||||
|
||||
context().requireEitherPermission(permissionsAllowing.invoke(methodName, args))
|
||||
return action()
|
||||
private inline fun <RESULT> guard(methodName: String, args: List<Class<*>>, action: () -> RESULT) : RESULT {
|
||||
if (!context().isPermitted(methodName, *(args.map { it.name }.toTypedArray()))) {
|
||||
throw PermissionException("User not authorized to perform RPC call $methodName with target $args")
|
||||
}
|
||||
else {
|
||||
return action()
|
||||
}
|
||||
}
|
||||
}
|
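For context, a sketch of the call-site shape the reworked guard is aimed at (an assumption, not part of this diff); the class-name arguments feed the flow-specific permissions resolved by the security manager:

    override fun <T> startFlowDynamic(logicType: Class<out FlowLogic<T>>, vararg args: Any?): FlowHandle<T> =
            guard("startFlowDynamic", listOf(logicType)) {
                // Delegates to the real CordaRPCOps implementation once the permission check passes.
                implementation.startFlowDynamic(logicType, *args)
            }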
@ -0,0 +1,28 @@
|
||||
package net.corda.node.internal.security
|
||||
|
||||
/**
|
||||
* Provides permission checking for the subject identified by the given [principal].
|
||||
*/
|
||||
interface AuthorizingSubject {
|
||||
|
||||
/**
|
||||
* Identity of underlying subject
|
||||
*/
|
||||
val principal: String
|
||||
|
||||
/**
|
||||
* Determines whether the underlying subject is entitled to perform a certain action
|
||||
* (e.g. an RPC invocation), represented by an [action] string followed by an
|
||||
* optional list of arguments.
|
||||
*/
|
||||
fun isPermitted(action : String, vararg arguments : String) : Boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* An implementation of [AuthorizingSubject] permitting all actions
|
||||
*/
|
||||
class AdminSubject(override val principal : String) : AuthorizingSubject {
|
||||
|
||||
override fun isPermitted(action: String, vararg arguments: String) = true
|
||||
|
||||
}
|
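A minimal usage sketch for the interface above (the action and argument names are assumptions borrowed from the RPC proxy elsewhere in this change):

    fun canStartFlow(subject: AuthorizingSubject, flowClass: Class<*>): Boolean =
            // An all-permitting subject such as AdminSubject returns true unconditionally.
            subject.isPermitted("startFlowDynamic", flowClass.name)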
@ -0,0 +1,43 @@
|
||||
package net.corda.node.internal.security
|
||||
|
||||
import java.util.*
|
||||
|
||||
class Password(valueRaw: CharArray) : AutoCloseable {
|
||||
|
||||
constructor(value: String) : this(value.toCharArray())
|
||||
|
||||
private val internalValue = valueRaw.copyOf()
|
||||
|
||||
val value: CharArray
|
||||
get() = internalValue.copyOf()
|
||||
|
||||
val valueAsString: String
|
||||
get() = internalValue.joinToString("")
|
||||
|
||||
override fun close() {
|
||||
internalValue.indices.forEach { index ->
|
||||
internalValue[index] = MASK
|
||||
}
|
||||
}
|
||||
|
||||
override fun equals(other: Any?): Boolean {
|
||||
if (this === other) return true
|
||||
if (javaClass != other?.javaClass) return false
|
||||
|
||||
other as Password
|
||||
|
||||
if (!Arrays.equals(internalValue, other.internalValue)) return false
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
override fun hashCode(): Int {
|
||||
return Arrays.hashCode(internalValue)
|
||||
}
|
||||
|
||||
override fun toString(): String = (0..5).map { MASK }.joinToString("")
|
||||
|
||||
private companion object {
|
||||
private const val MASK = '*'
|
||||
}
|
||||
}
|
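A short usage sketch for the class above (illustrative only): closing the instance wipes the backing characters, so copy what you need before the `use` block ends.

    Password("s3cr3t").use { pwd ->
        check(pwd.valueAsString == "s3cr3t")   // value/valueAsString return defensive copies while open
    }                                          // after close(), the internal characters are overwritten with '*'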
@ -0,0 +1,41 @@
|
||||
package net.corda.node.internal.security
|
||||
|
||||
import net.corda.core.context.AuthServiceId
|
||||
import org.apache.shiro.authc.AuthenticationException
|
||||
import javax.security.auth.login.FailedLoginException
|
||||
|
||||
/**
|
||||
* Manage security of RPC users, providing logic for user authentication and authorization.
|
||||
*/
|
||||
interface RPCSecurityManager : AutoCloseable {
|
||||
/**
|
||||
* An identifier associated to this security service
|
||||
*/
|
||||
val id: AuthServiceId
|
||||
|
||||
/**
|
||||
* Perform user authentication from principal and password. Return an [AuthorizingSubject] containing
|
||||
* the permissions of the user identified by the given [principal] if authentication via password succeeds,
|
||||
* otherwise a [FailedLoginException] is thrown.
|
||||
*/
|
||||
fun authenticate(principal: String, password: Password): AuthorizingSubject
|
||||
|
||||
/**
|
||||
* Construct an [AuthorizingSubject] instance containing the permissions of the user associated with
|
||||
* the given principal. Throws an exception if the principal cannot be resolved to a known user.
|
||||
*/
|
||||
fun buildSubject(principal: String): AuthorizingSubject
|
||||
}
|
||||
|
||||
/**
|
||||
* Non-throwing version of authenticate, returning null instead of throwing in case of authentication failure
|
||||
*/
|
||||
fun RPCSecurityManager.tryAuthenticate(principal: String, password: Password): AuthorizingSubject? {
|
||||
password.use {
|
||||
return try {
|
||||
authenticate(principal, password)
|
||||
} catch (e: AuthenticationException) {
|
||||
null
|
||||
}
|
||||
}
|
||||
}
|
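To illustrate the contract above (a sketch with made-up credentials, not part of this diff):

    val subject = securityManager.tryAuthenticate("alice", Password("password"))
            ?: throw FailedLoginException("Unknown user or wrong password")
    if (subject.isPermitted("nodeInfo")) {
        // proceed with the RPC call
    }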
@ -0,0 +1,308 @@
package net.corda.node.internal.security

import com.google.common.cache.CacheBuilder
import com.google.common.cache.Cache
import com.google.common.primitives.Ints
import com.zaxxer.hikari.HikariConfig
import com.zaxxer.hikari.HikariDataSource
import net.corda.core.context.AuthServiceId
import net.corda.core.utilities.loggerFor
import net.corda.node.services.config.PasswordEncryption
import net.corda.node.services.config.SecurityConfiguration
import net.corda.node.services.config.AuthDataSourceType
import net.corda.nodeapi.internal.config.User
import org.apache.shiro.authc.*
import org.apache.shiro.authc.credential.PasswordMatcher
import org.apache.shiro.authc.credential.SimpleCredentialsMatcher
import org.apache.shiro.authz.AuthorizationInfo
import org.apache.shiro.authz.Permission
import org.apache.shiro.authz.SimpleAuthorizationInfo
import org.apache.shiro.authz.permission.DomainPermission
import org.apache.shiro.authz.permission.PermissionResolver
import org.apache.shiro.cache.CacheManager
import org.apache.shiro.mgt.DefaultSecurityManager
import org.apache.shiro.realm.AuthorizingRealm
import org.apache.shiro.realm.jdbc.JdbcRealm
import org.apache.shiro.subject.PrincipalCollection
import org.apache.shiro.subject.SimplePrincipalCollection
import javax.security.auth.login.FailedLoginException
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.TimeUnit
private typealias AuthServiceConfig = SecurityConfiguration.AuthService

/**
* Default implementation of [RPCSecurityManager] adapting
* [org.apache.shiro.mgt.SecurityManager]
*/
class RPCSecurityManagerImpl(config: AuthServiceConfig) : RPCSecurityManager {

override val id = config.id
private val manager: DefaultSecurityManager

init {
manager = buildImpl(config)
}

override fun close() {
manager.destroy()
}

@Throws(FailedLoginException::class)
override fun authenticate(principal: String, password: Password): AuthorizingSubject {
password.use {
val authToken = UsernamePasswordToken(principal, it.value)
try {
manager.authenticate(authToken)
} catch (authcException: AuthenticationException) {
throw FailedLoginException(authcException.toString())
}
return ShiroAuthorizingSubject(
subjectId = SimplePrincipalCollection(principal, id.value),
manager = manager)
}
}

override fun buildSubject(principal: String): AuthorizingSubject =
ShiroAuthorizingSubject(
subjectId = SimplePrincipalCollection(principal, id.value),
manager = manager)

companion object {

private val logger = loggerFor<RPCSecurityManagerImpl>()

/**
* Instantiate an RPCSecurityManager initialised with user data from a list of [User]
*/
fun fromUserList(id: AuthServiceId, users: List<User>) =
RPCSecurityManagerImpl(
AuthServiceConfig.fromUsers(users).copy(id = id))

// Build internal Shiro securityManager instance
private fun buildImpl(config: AuthServiceConfig): DefaultSecurityManager {
val realm = when (config.dataSource.type) {
AuthDataSourceType.DB -> {
logger.info("Constructing DB-backed security data source: ${config.dataSource.connection}")
NodeJdbcRealm(config.dataSource)
}
AuthDataSourceType.INMEMORY -> {
logger.info("Constructing realm from list of users in config ${config.dataSource.users!!}")
InMemoryRealm(config.dataSource.users, config.id.value, config.dataSource.passwordEncryption)
}
}
return DefaultSecurityManager(realm).also {
// Setup optional cache layer if configured
it.cacheManager = config.options?.cache?.let {
GuavaCacheManager(
timeToLiveSeconds = it.expiryTimeInSecs,
maxSize = it.capacity)
}
}
}
}
}

/**
* Provide a representation of RPC permissions based on the Apache Shiro permissions framework.
* A permission represents a set of actions: for example, the set of all RPC invocations, or the set
* of RPC invocations acting on a given class of Flows in input. A permission `implies` another one if
* its set of actions contains the set of actions in the other one. In Apache Shiro, permissions are
* represented by instances of the [Permission] interface which offers a single method: [implies], to
* test if the 'x implies y' binary predicate is satisfied.
*/
private class RPCPermission : DomainPermission {

/**
* Helper constructor directly setting actions and target field
*
* @param methods Set of allowed RPC methods
* @param target An optional "target" type on which methods act
*/
constructor(methods: Set<String>, target: String? = null) : super(methods, target?.let { setOf(it) })

/**
* Default constructor instantiates an "ALL" permission
*/
constructor() : super()
}

/**
* A [org.apache.shiro.authz.permission.PermissionResolver] implementation for RPC permissions.
* Provides a method to construct an [RPCPermission] instance from its string representation
* in the form used by a Node admin.
*
* Currently valid permission strings have the forms:
*
* - `ALL`: allowing all types of RPC calls
*
* - `InvokeRpc.$RPCMethodName`: allows calling the given RPC method without restrictions on its arguments.
*
* - `StartFlow.$FlowClassName`: allows calling a `startFlow*` RPC method targeting a Flow instance
* of the given class
*
*/
private object RPCPermissionResolver : PermissionResolver {

private val SEPARATOR = '.'
private val ACTION_START_FLOW = "startflow"
private val ACTION_INVOKE_RPC = "invokerpc"
private val ACTION_ALL = "all"

private val FLOW_RPC_CALLS = setOf("startFlowDynamic", "startTrackedFlowDynamic")

override fun resolvePermission(representation: String): Permission {

val action = representation.substringBefore(SEPARATOR).toLowerCase()
when (action) {
ACTION_INVOKE_RPC -> {
val rpcCall = representation.substringAfter(SEPARATOR)
require(representation.count { it == SEPARATOR } == 1) {
"Malformed permission string"
}
return RPCPermission(setOf(rpcCall))
}
ACTION_START_FLOW -> {
val targetFlow = representation.substringAfter(SEPARATOR)
require(targetFlow.isNotEmpty()) {
"Missing target flow after StartFlow"
}
return RPCPermission(FLOW_RPC_CALLS, targetFlow)
}
ACTION_ALL -> {
// Leaving empty set of targets and actions to match everything
return RPCPermission()
}
else -> throw IllegalArgumentException("Unknown permission action specifier: $action")
}
}
}

private class ShiroAuthorizingSubject(
private val subjectId: PrincipalCollection,
private val manager: DefaultSecurityManager) : AuthorizingSubject {

override val principal get() = subjectId.primaryPrincipal.toString()

override fun isPermitted(action: String, vararg arguments: String) =
manager.isPermitted(subjectId, RPCPermission(setOf(action), arguments.firstOrNull()))
}

private fun buildCredentialMatcher(type: PasswordEncryption) = when (type) {
PasswordEncryption.NONE -> SimpleCredentialsMatcher()
PasswordEncryption.SHIRO_1_CRYPT -> PasswordMatcher()
}

private class InMemoryRealm(users: List<User>,
realmId: String,
passwordEncryption: PasswordEncryption = PasswordEncryption.NONE) : AuthorizingRealm() {

private val authorizationInfoByUser: Map<String, AuthorizationInfo>
private val authenticationInfoByUser: Map<String, AuthenticationInfo>

init {
permissionResolver = RPCPermissionResolver
users.forEach {
require(it.username.matches("\\w+".toRegex())) {
"Username ${it.username} contains invalid characters"
}
}
val resolvePermission = { s: String -> permissionResolver.resolvePermission(s) }
authorizationInfoByUser = users.associate {
it.username to SimpleAuthorizationInfo().apply {
objectPermissions = it.permissions.map { resolvePermission(it) }.toSet()
roles = emptySet<String>()
stringPermissions = emptySet<String>()
}
}
authenticationInfoByUser = users.associate {
it.username to SimpleAuthenticationInfo().apply {
credentials = it.password
principals = SimplePrincipalCollection(it.username, realmId)
}
}
credentialsMatcher = buildCredentialMatcher(passwordEncryption)
}

// Methods from AuthorizingRealm interface used by Shiro to query
// for authentication/authorization data for a given user
override fun doGetAuthenticationInfo(token: AuthenticationToken) =
authenticationInfoByUser[token.principal as String]

override fun doGetAuthorizationInfo(principals: PrincipalCollection) =
authorizationInfoByUser[principals.primaryPrincipal as String]
}

private class NodeJdbcRealm(config: SecurityConfiguration.AuthService.DataSource) : JdbcRealm() {

init {
credentialsMatcher = buildCredentialMatcher(config.passwordEncryption)
setPermissionsLookupEnabled(true)
dataSource = HikariDataSource(HikariConfig(config.connection!!))
permissionResolver = RPCPermissionResolver
}
}

private typealias ShiroCache<K, V> = org.apache.shiro.cache.Cache<K, V>

/**
* Adapts a [com.google.common.cache.Cache] to a [org.apache.shiro.cache.Cache] implementation.
*/
private fun <K, V> Cache<K, V>.toShiroCache(name: String) = object : ShiroCache<K, V> {

val name = name
private val impl = this@toShiroCache

override operator fun get(key: K) = impl.getIfPresent(key)

override fun put(key: K, value: V): V? {
val lastValue = get(key)
impl.put(key, value)
return lastValue
}

override fun remove(key: K): V? {
val lastValue = get(key)
impl.invalidate(key)
return lastValue
}

override fun clear() {
impl.invalidateAll()
}

override fun size() = Ints.checkedCast(impl.size())
override fun keys() = impl.asMap().keys
override fun values() = impl.asMap().values
override fun toString() = "Guava cache adapter [$impl]"
}

/**
* Implementation of [org.apache.shiro.cache.CacheManager] based on
* cache implementation in [com.google.common.cache]
*/
private class GuavaCacheManager(val maxSize: Long,
val timeToLiveSeconds: Long) : CacheManager {

private val instances = ConcurrentHashMap<String, ShiroCache<*, *>>()

override fun <K, V> getCache(name: String): ShiroCache<K, V> {
val result = instances[name] ?: buildCache<K, V>(name)
instances.putIfAbsent(name, result)
return result as ShiroCache<K, V>
}

private fun <K, V> buildCache(name: String) : ShiroCache<K, V> {
logger.info("Constructing cache '$name' with maximumSize=$maxSize, TTL=${timeToLiveSeconds}s")
return CacheBuilder.newBuilder()
.expireAfterWrite(timeToLiveSeconds, TimeUnit.SECONDS)
.maximumSize(maxSize)
.build<K, V>()
.toShiroCache(name)
}

companion object {
private val logger = loggerFor<GuavaCacheManager>()
}
}
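A hedged sketch (not part of the commit) of how the pieces above fit together, using only declarations from this file plus hypothetical user data and an example flow class name:

val securityManager = RPCSecurityManagerImpl.fromUserList(
        id = AuthServiceId("NODE_CONFIG"),
        users = listOf(User("alice", "password", setOf("StartFlow.net.corda.flows.ExampleFlow"))))
val subject = securityManager.authenticate("alice", Password("password"))
// RPCPermissionResolver maps the "StartFlow..." string onto the startFlow* RPC calls,
// so the subject may start that flow class but is expected to be denied anything else.
val canStartFlow = subject.isPermitted("startFlowDynamic", "net.corda.flows.ExampleFlow")
val canInvokeOther = subject.isPermitted("shutdown")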
@ -1,33 +0,0 @@
package net.corda.node.services

import net.corda.core.context.AuthServiceId
import net.corda.nodeapi.internal.config.User

/**
* Service for retrieving [User] objects representing RPC users who are authorised to use the RPC system. A [User]
* contains their login username and password along with a set of permissions for RPC services they are allowed access
* to. These permissions are represented as [String]s to allow RPC implementations to add their own permissioning.
*/
interface RPCUserService {

fun getUser(username: String): User?
val users: List<User>

val id: AuthServiceId
}

// TODO Store passwords as salted hashes
// TODO Or ditch this and consider something like Apache Shiro
// TODO Need access to permission checks from inside flows and at other point during audit checking.
class RPCUserServiceImpl(override val users: List<User>) : RPCUserService {

override val id: AuthServiceId = AuthServiceId("NODE_FILE_CONFIGURATION")

init {
users.forEach {
require(it.username.matches("\\w+".toRegex())) { "Username ${it.username} contains invalid characters" }
}
}

override fun getUser(username: String): User? = users.find { it.username == username }
}
@ -1,6 +1,7 @@
package net.corda.node.services.config

import com.typesafe.config.Config
import net.corda.core.context.AuthServiceId
import net.corda.core.identity.CordaX500Name
import net.corda.core.utilities.NetworkHostAndPort
import net.corda.core.utilities.seconds
@ -21,6 +22,7 @@ interface NodeConfiguration : NodeSSLConfiguration {
val exportJMXto: String
val dataSourceProperties: Properties
val rpcUsers: List<User>
val security: SecurityConfiguration?
val devMode: Boolean
val devModeOptions: DevModeOptions?
val compatibilityZoneURL: URL?
@ -93,6 +95,7 @@ data class NodeConfigurationImpl(
override val dataSourceProperties: Properties,
override val compatibilityZoneURL: URL? = null,
override val rpcUsers: List<User>,
override val security : SecurityConfiguration? = null,
override val verifierType: VerifierType,
// TODO typesafe config supports the notion of durations. Make use of that by mapping it to java.time.Duration.
// Then rename this to messageRedeliveryDelay and make it of type Duration
@ -113,14 +116,18 @@ data class NodeConfigurationImpl(
// TODO See TODO above. Rename this to nodeInfoPollingFrequency and make it of type Duration
override val additionalNodeInfoPollingFrequencyMsec: Long = 5.seconds.toMillis(),
override val sshd: SSHDConfiguration? = null,
override val database: DatabaseConfig = DatabaseConfig(initialiseSchema = devMode)
override val database: DatabaseConfig = DatabaseConfig(initialiseSchema = devMode, exportHibernateJMXStatistics = devMode)
) : NodeConfiguration {

override val exportJMXto: String get() = "http"

init {
// This is a sanity feature do not remove.
require(!useTestClock || devMode) { "Cannot use test clock outside of dev mode" }
require(devModeOptions == null || devMode) { "Cannot use devModeOptions outside of dev mode" }
require(security == null || rpcUsers.isEmpty()) {
"Cannot specify both 'rpcUsers' and 'security' in configuration"
}
}
}

@ -148,4 +155,78 @@ data class CertChainPolicyConfig(val role: String, private val policy: CertChain
}
}

data class SSHDConfiguration(val port: Int)
data class SSHDConfiguration(val port: Int)

// Supported types of authentication/authorization data providers
enum class AuthDataSourceType {
// External RDBMS
DB,

// Static dataset hard-coded in config
INMEMORY
}

// Password encryption scheme
enum class PasswordEncryption {

// Password stored in clear
NONE,

// Password salt-hashed using Apache Shiro flexible encryption format
// [org.apache.shiro.crypto.hash.format.Shiro1CryptFormat]
SHIRO_1_CRYPT
}

// Subset of Node configuration related to security aspects
data class SecurityConfiguration(val authService: SecurityConfiguration.AuthService) {

// Configure RPC/Shell users authentication/authorization service
data class AuthService(val dataSource: AuthService.DataSource,
val id: AuthServiceId = defaultAuthServiceId(dataSource.type),
val options: AuthService.Options? = null) {

init {
require(!(dataSource.type == AuthDataSourceType.INMEMORY &&
options?.cache != null)) {
"No cache supported for INMEMORY data provider"
}
}

// Optional components: cache
data class Options(val cache: Options.Cache?) {

// Cache parameters
data class Cache(val expiryTimeInSecs: Long, val capacity: Long)

}

// Provider of users credentials and permissions data
data class DataSource(val type: AuthDataSourceType,
val passwordEncryption: PasswordEncryption = PasswordEncryption.NONE,
val connection: Properties? = null,
val users: List<User>? = null) {
init {
when (type) {
AuthDataSourceType.INMEMORY -> require(users != null && connection == null)
AuthDataSourceType.DB -> require(users == null && connection != null)
}
}
}

companion object {
// If unspecified, we assign an AuthServiceId by default based on the
// underlying data provider
fun defaultAuthServiceId(type: AuthDataSourceType) = when (type) {
AuthDataSourceType.INMEMORY -> AuthServiceId("NODE_CONFIG")
AuthDataSourceType.DB -> AuthServiceId("REMOTE_DATABASE")
}

fun fromUsers(users: List<User>) = AuthService(
dataSource = DataSource(
type = AuthDataSourceType.INMEMORY,
users = users,
passwordEncryption = PasswordEncryption.NONE),
id = AuthServiceId("NODE_CONFIG"))
}
}
}
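A hedged sketch (not taken from the commit) of building the programmatic equivalent of a node.conf 'security' block directly against the data classes above, using the INMEMORY provider and no cache; user data is hypothetical:

val securityConfig = SecurityConfiguration(
        authService = SecurityConfiguration.AuthService.fromUsers(
                listOf(User("alice", "password", setOf("ALL")))))
// The DB variant would instead supply DataSource(type = AuthDataSourceType.DB, connection = <Hikari properties>).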
@ -12,9 +12,13 @@ import net.corda.core.node.NodeInfo
import net.corda.core.node.services.NetworkMapCache
import net.corda.core.node.services.NetworkMapCache.MapChange
import net.corda.core.serialization.SingletonSerializeAsToken
import net.corda.core.utilities.*
import net.corda.core.utilities.NetworkHostAndPort
import net.corda.core.utilities.contextLogger
import net.corda.core.utilities.debug
import net.corda.core.utilities.parsePublicKeyBase58
import net.corda.node.internal.Node
import net.corda.node.services.RPCUserService
import net.corda.node.internal.security.Password
import net.corda.node.internal.security.RPCSecurityManager
import net.corda.node.services.config.NodeConfiguration
import net.corda.node.services.messaging.NodeLoginModule.Companion.NODE_ROLE
import net.corda.node.services.messaging.NodeLoginModule.Companion.PEER_ROLE
@ -25,13 +29,13 @@ import net.corda.nodeapi.internal.crypto.X509Utilities.CORDA_CLIENT_TLS
import net.corda.nodeapi.internal.crypto.X509Utilities.CORDA_ROOT_CA
import net.corda.nodeapi.internal.crypto.loadKeyStore
import net.corda.nodeapi.*
import net.corda.nodeapi.internal.ArtemisMessagingComponent.ArtemisPeerAddress
import net.corda.nodeapi.internal.ArtemisMessagingComponent.Companion.INTERNAL_PREFIX
import net.corda.nodeapi.internal.ArtemisMessagingComponent.Companion.NODE_USER
import net.corda.nodeapi.internal.ArtemisMessagingComponent.Companion.NOTIFICATIONS_ADDRESS
import net.corda.nodeapi.internal.ArtemisMessagingComponent.Companion.P2P_QUEUE
import net.corda.nodeapi.internal.ArtemisMessagingComponent.Companion.PEERS_PREFIX
import net.corda.nodeapi.internal.ArtemisMessagingComponent.Companion.PEER_USER
import net.corda.nodeapi.internal.ArtemisMessagingComponent.ArtemisPeerAddress
import net.corda.nodeapi.internal.ArtemisMessagingComponent.NodeAddress
import net.corda.nodeapi.internal.requireOnDefaultFileSystem
import org.apache.activemq.artemis.api.core.SimpleString
@ -97,7 +101,7 @@ class ArtemisMessagingServer(private val config: NodeConfiguration,
private val p2pPort: Int,
val rpcPort: Int?,
val networkMapCache: NetworkMapCache,
val userService: RPCUserService) : SingletonSerializeAsToken() {
val securityManager: RPCSecurityManager) : SingletonSerializeAsToken() {
companion object {
private val log = contextLogger()
/** 10 MiB maximum allowed file size for attachments, including message headers. TODO: acquire this value from Network Map when supported. */
@ -211,7 +215,12 @@ class ArtemisMessagingServer(private val config: NodeConfiguration,
addressFullMessagePolicy = AddressFullMessagePolicy.FAIL
}
)
}.configureAddressSecurity()
// JMX enablement
if (config.exportJMXto.isNotEmpty()) {
isJMXManagementEnabled = true
isJMXUseBrokerName = true
}

}.configureAddressSecurity()

private fun queueConfig(name: String, address: String = name, filter: String? = null, durable: Boolean): CoreQueueConfiguration {
return CoreQueueConfiguration().apply {
@ -229,13 +238,11 @@ class ArtemisMessagingServer(private val config: NodeConfiguration,
* 3. RPC users. These are only given sufficient access to perform RPC with us.
* 4. Verifiers. These are given read access to the verification request queue and write access to the response queue.
*/
private fun ConfigurationImpl.configureAddressSecurity() : Pair<Configuration, LoginListener> {
private fun ConfigurationImpl.configureAddressSecurity(): Pair<Configuration, LoginListener> {
val nodeInternalRole = Role(NODE_ROLE, true, true, true, true, true, true, true, true)
securityRoles["$INTERNAL_PREFIX#"] = setOf(nodeInternalRole) // Do not add any other roles here as it's only for the node
securityRoles[P2P_QUEUE] = setOf(nodeInternalRole, restrictedRole(PEER_ROLE, send = true))
securityRoles[RPCApi.RPC_SERVER_QUEUE_NAME] = setOf(nodeInternalRole, restrictedRole(RPC_ROLE, send = true))
// TODO: remove the NODE_USER role below once the webserver doesn't need it anymore.
securityRoles["${RPCApi.RPC_CLIENT_QUEUE_NAME_PREFIX}.$NODE_USER.#"] = setOf(nodeInternalRole)
// Each RPC user must have its own role and its own queue. This prevents users accessing each other's queues
// and stealing RPC responses.
val rolesAdderOnLogin = RolesAdderOnLogin { username ->
@ -282,7 +289,7 @@ class ArtemisMessagingServer(private val config: NodeConfiguration,
override fun getAppConfigurationEntry(name: String): Array<AppConfigurationEntry> {
val options = mapOf(
LoginListener::javaClass.name to loginListener,
RPCUserService::class.java.name to userService,
RPCSecurityManager::class.java.name to securityManager,
NodeLoginModule.CERT_CHAIN_CHECKS_OPTION_NAME to certChecks)
return arrayOf(AppConfigurationEntry(name, REQUIRED, options))
}
@ -557,7 +564,7 @@ class NodeLoginModule : LoginModule {
private var loginSucceeded: Boolean = false
private lateinit var subject: Subject
private lateinit var callbackHandler: CallbackHandler
private lateinit var userService: RPCUserService
private lateinit var securityManager: RPCSecurityManager
private lateinit var loginListener: LoginListener
private lateinit var peerCertCheck: CertificateChainCheckPolicy.Check
private lateinit var nodeCertCheck: CertificateChainCheckPolicy.Check
@ -567,7 +574,7 @@ class NodeLoginModule : LoginModule {
override fun initialize(subject: Subject, callbackHandler: CallbackHandler, sharedState: Map<String, *>, options: Map<String, *>) {
this.subject = subject
this.callbackHandler = callbackHandler
userService = options[RPCUserService::class.java.name] as RPCUserService
securityManager = options[RPCSecurityManager::class.java.name] as RPCSecurityManager
loginListener = options[LoginListener::javaClass.name] as LoginListener
val certChainChecks: Map<String, CertificateChainCheckPolicy.Check> = uncheckedCast(options[CERT_CHAIN_CHECKS_OPTION_NAME])
peerCertCheck = certChainChecks[PEER_ROLE]!!
@ -598,7 +605,7 @@ class NodeLoginModule : LoginModule {
PEER_ROLE -> authenticatePeer(certificates)
NODE_ROLE -> authenticateNode(certificates)
VERIFIER_ROLE -> authenticateVerifier(certificates)
RPC_ROLE -> authenticateRpcUser(password, username)
RPC_ROLE -> authenticateRpcUser(username, Password(password))
else -> throw FailedLoginException("Peer does not belong on our network")
}
principals += UserPrincipal(validatedUser)
@ -629,13 +636,8 @@ class NodeLoginModule : LoginModule {
return certificates.first().subjectDN.name
}

private fun authenticateRpcUser(password: String, username: String): String {
val rpcUser = userService.getUser(username) ?: throw FailedLoginException("User does not exist")
if (password != rpcUser.password) {
// TODO Switch to hashed passwords
// TODO Retrieve client IP address to include in exception message
throw FailedLoginException("Password for user $username does not match")
}
private fun authenticateRpcUser(username: String, password: Password): String {
securityManager.authenticate(username, password)
loginListener(username)
principals += RolePrincipal(RPC_ROLE) // This enables the RPC client to send requests
principals += RolePrincipal("${RPCApi.RPC_CLIENT_QUEUE_NAME_PREFIX}.$username") // This enables the RPC client to receive responses
@ -4,7 +4,7 @@ import net.corda.core.identity.CordaX500Name
import net.corda.core.messaging.RPCOps
import net.corda.core.serialization.SingletonSerializeAsToken
import net.corda.core.utilities.NetworkHostAndPort
import net.corda.node.services.RPCUserService
import net.corda.node.internal.security.RPCSecurityManager
import net.corda.nodeapi.internal.config.SSLConfiguration
import net.corda.nodeapi.internal.ArtemisMessagingComponent.Companion.NODE_USER
import net.corda.nodeapi.internal.crypto.X509Utilities
@ -16,10 +16,10 @@ class RPCMessagingClient(private val config: SSLConfiguration, serverAddress: Ne
private val artemis = ArtemisMessagingClient(config, serverAddress)
private var rpcServer: RPCServer? = null

fun start(rpcOps: RPCOps, userService: RPCUserService) = synchronized(this) {
fun start(rpcOps: RPCOps, securityManager: RPCSecurityManager) = synchronized(this) {
val locator = artemis.start().sessionFactory.serverLocator
val myCert = loadKeyStore(config.sslKeystore, config.keyStorePassword).getX509Certificate(X509Utilities.CORDA_CLIENT_TLS)
rpcServer = RPCServer(rpcOps, NODE_USER, NODE_USER, locator, userService, CordaX500Name.build(myCert.subjectX500Principal))
rpcServer = RPCServer(rpcOps, NODE_USER, NODE_USER, locator, securityManager, CordaX500Name.build(myCert.subjectX500Principal))
}

fun start2(serverControl: ActiveMQServerControl) = synchronized(this) {
@ -26,11 +26,10 @@ import net.corda.core.serialization.SerializationContext
import net.corda.core.serialization.SerializationDefaults.RPC_SERVER_CONTEXT
import net.corda.core.serialization.deserialize
import net.corda.core.utilities.*
import net.corda.node.services.RPCUserService
import net.corda.node.internal.security.AuthorizingSubject
import net.corda.node.internal.security.RPCSecurityManager
import net.corda.node.services.logging.pushToLoggingContext
import net.corda.nodeapi.*
import net.corda.nodeapi.internal.ArtemisMessagingComponent.Companion.NODE_USER
import net.corda.nodeapi.internal.config.User
import org.apache.activemq.artemis.api.core.Message
import org.apache.activemq.artemis.api.core.SimpleString
import org.apache.activemq.artemis.api.core.client.ActiveMQClient.DEFAULT_ACK_BATCH_SIZE
@ -85,7 +84,7 @@ class RPCServer(
private val rpcServerUsername: String,
private val rpcServerPassword: String,
private val serverLocator: ServerLocator,
private val userService: RPCUserService,
private val securityManager: RPCSecurityManager,
private val nodeLegalName: CordaX500Name,
private val rpcConfiguration: RPCServerConfiguration = RPCServerConfiguration.default
) {
@ -213,6 +212,7 @@ class RPCServer(
reaperScheduledFuture?.cancel(false)
rpcExecutor?.shutdownNow()
reaperExecutor?.shutdownNow()
securityManager.close()
sessionAndConsumers.forEach {
it.sessionFactory.close()
}
@ -357,9 +357,6 @@ class RPCServer(
observableMap.cleanUp()
}

// TODO remove this User once webserver doesn't need it
private val nodeUser = User(NODE_USER, NODE_USER, setOf())

private fun ClientMessage.context(sessionId: Trace.SessionId): RpcAuthContext {
val trace = Trace.newInstance(sessionId = sessionId)
val externalTrace = externalTrace()
@ -368,19 +365,10 @@ class RPCServer(
return RpcAuthContext(InvocationContext.rpc(rpcActor.first, trace, externalTrace, impersonatedActor), rpcActor.second)
}

private fun actorFrom(message: ClientMessage): Pair<Actor, RpcPermissions> {
private fun actorFrom(message: ClientMessage): Pair<Actor, AuthorizingSubject> {
val validatedUser = message.getStringProperty(Message.HDR_VALIDATED_USER) ?: throw IllegalArgumentException("Missing validated user from the Artemis message")
val targetLegalIdentity = message.getStringProperty(RPCApi.RPC_TARGET_LEGAL_IDENTITY)?.let(CordaX500Name.Companion::parse) ?: nodeLegalName
// TODO switch userService based on targetLegalIdentity
val rpcUser = userService.getUser(validatedUser)
return if (rpcUser != null) {
Actor(Id(rpcUser.username), userService.id, targetLegalIdentity) to RpcPermissions(rpcUser.permissions)
} else if (CordaX500Name.parse(validatedUser) == nodeLegalName) {
// TODO remove this after Shell and WebServer will no longer need it
Actor(Id(nodeUser.username), userService.id, targetLegalIdentity) to RpcPermissions(nodeUser.permissions)
} else {
throw IllegalArgumentException("Validated user '$validatedUser' is not an RPC user nor the NODE user")
}
return Pair(Actor(Id(validatedUser), securityManager.id, targetLegalIdentity), securityManager.buildSubject(validatedUser))
}
}

@ -1,30 +1,9 @@
package net.corda.node.services.messaging

import net.corda.client.rpc.PermissionException
import net.corda.core.context.InvocationContext
import net.corda.node.services.Permissions
import net.corda.nodeapi.internal.ArtemisMessagingComponent
import net.corda.node.internal.security.AuthorizingSubject

data class RpcAuthContext(val invocation: InvocationContext, val grantedPermissions: RpcPermissions) {
data class RpcAuthContext(val invocation: InvocationContext,
private val authorizer: AuthorizingSubject)
: AuthorizingSubject by authorizer

fun requirePermission(permission: String) = requireEitherPermission(setOf(permission))

fun requireEitherPermission(permissions: Set<String>): RpcAuthContext {

// TODO remove the NODE_USER condition once webserver and shell won't need it anymore
if (invocation.principal().name != ArtemisMessagingComponent.NODE_USER && !grantedPermissions.coverAny(permissions)) {
throw PermissionException("User not permissioned with any of $permissions, permissions are ${this.grantedPermissions}.")
}
return this
}
}

data class RpcPermissions(private val values: Set<String> = emptySet()) {

companion object {
val NONE = RpcPermissions()
val ALL = RpcPermissions(setOf("ALL"))
}

fun coverAny(permissions: Set<String>) = !values.intersect(permissions + Permissions.all()).isEmpty()
}
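A hedged sketch of how an RPC implementation can guard a call through the slimmed-down RpcAuthContext above; the guarded method name is purely illustrative:

fun clearNetworkMapCache(context: RpcAuthContext) {
    context.requirePermission("InvokeRpc.clearNetworkMapCache")
    // reached only if the delegated AuthorizingSubject granted the permission,
    // otherwise a PermissionException is thrown by requirePermission
}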
@ -4,31 +4,30 @@ import net.corda.core.context.Actor
import net.corda.core.context.InvocationContext
import net.corda.core.identity.CordaX500Name
import net.corda.core.messaging.CordaRPCOps
import net.corda.node.services.RPCUserService
import net.corda.node.services.messaging.RpcPermissions
import net.corda.node.internal.security.Password
import net.corda.node.internal.security.RPCSecurityManager
import net.corda.node.internal.security.tryAuthenticate
import org.crsh.auth.AuthInfo
import org.crsh.auth.AuthenticationPlugin
import org.crsh.plugin.CRaSHPlugin

class CordaAuthenticationPlugin(val rpcOps:CordaRPCOps, val userService:RPCUserService, val nodeLegalName:CordaX500Name) : CRaSHPlugin<AuthenticationPlugin<String>>(), AuthenticationPlugin<String> {
class CordaAuthenticationPlugin(private val rpcOps: CordaRPCOps, private val securityManager: RPCSecurityManager, private val nodeLegalName: CordaX500Name) : CRaSHPlugin<AuthenticationPlugin<String>>(), AuthenticationPlugin<String> {

override fun getImplementation(): AuthenticationPlugin<String> = this

override fun getName(): String = "corda"

override fun authenticate(username: String?, credential: String?): AuthInfo {

if (username == null || credential == null) {
return AuthInfo.UNSUCCESSFUL
}

val user = userService.getUser(username)

if (user != null && user.password == credential) {
val actor = Actor(Actor.Id(username), userService.id, nodeLegalName)
return CordaSSHAuthInfo(true, makeRPCOpsWithContext(rpcOps, InvocationContext.rpc(actor), RpcPermissions(user.permissions)))
val authorizingSubject = securityManager.tryAuthenticate(username, Password(credential))
if (authorizingSubject != null) {
val actor = Actor(Actor.Id(username), securityManager.id, nodeLegalName)
return CordaSSHAuthInfo(true, makeRPCOpsWithContext(rpcOps, InvocationContext.rpc(actor), authorizingSubject))
}

return AuthInfo.UNSUCCESSFUL;
return AuthInfo.UNSUCCESSFUL
}

override fun getCredentialType(): Class<String> = String::class.java
@ -25,11 +25,11 @@ import net.corda.core.messaging.StateMachineUpdate
import net.corda.core.node.services.IdentityService
import net.corda.node.internal.Node
import net.corda.node.internal.StartedNode
import net.corda.node.services.RPCUserService
import net.corda.node.internal.security.AdminSubject
import net.corda.node.internal.security.RPCSecurityManager
import net.corda.node.services.config.NodeConfiguration
import net.corda.node.services.messaging.CURRENT_RPC_CONTEXT
import net.corda.node.services.messaging.RpcAuthContext
import net.corda.node.services.messaging.RpcPermissions
import net.corda.node.utilities.ANSIProgressRenderer
import net.corda.node.utilities.StdoutANSIProgressRenderer
import net.corda.nodeapi.internal.persistence.CordaPersistence
@ -82,19 +82,19 @@ object InteractiveShell {
private lateinit var node: StartedNode<Node>
@VisibleForTesting
internal lateinit var database: CordaPersistence
private lateinit var rpcOps:CordaRPCOps
private lateinit var userService:RPCUserService
private lateinit var identityService:IdentityService
private var shell:Shell? = null
private lateinit var rpcOps: CordaRPCOps
private lateinit var securityManager: RPCSecurityManager
private lateinit var identityService: IdentityService
private var shell: Shell? = null
private lateinit var nodeLegalName: CordaX500Name

/**
* Starts an interactive shell connected to the local terminal. This shell gives administrator access to the node
* internals.
*/
fun startShell(configuration:NodeConfiguration, cordaRPCOps: CordaRPCOps, userService: RPCUserService, identityService: IdentityService, database: CordaPersistence) {
fun startShell(configuration: NodeConfiguration, cordaRPCOps: CordaRPCOps, securityManager: RPCSecurityManager, identityService: IdentityService, database: CordaPersistence) {
this.rpcOps = cordaRPCOps
this.userService = userService
this.securityManager = securityManager
this.identityService = identityService
this.nodeLegalName = configuration.myLegalName
this.database = database
@ -123,14 +123,14 @@ object InteractiveShell {
}
}

fun runLocalShell(node:StartedNode<Node>) {
fun runLocalShell(node: StartedNode<Node>) {
val terminal = TerminalFactory.create()
val consoleReader = ConsoleReader("Corda", FileInputStream(FileDescriptor.`in`), System.out, terminal)
val jlineProcessor = JLineProcessor(terminal.isAnsiSupported, shell, consoleReader, System.out)
InterruptHandler { jlineProcessor.interrupt() }.install()
thread(name = "Command line shell processor", isDaemon = true) {
// Give whoever has local shell access administrator access to the node.
val context = RpcAuthContext(net.corda.core.context.InvocationContext.shell(), RpcPermissions.ALL)
val context = RpcAuthContext(net.corda.core.context.InvocationContext.shell(), AdminSubject("SHELL_USER"))
CURRENT_RPC_CONTEXT.set(context)
Emoji.renderIfSupported {
jlineProcessor.run()
@ -169,7 +169,7 @@ object InteractiveShell {
// Don't use the Java language plugin (we may not have tools.jar available at runtime), this
// will cause any commands using JIT Java compilation to be suppressed. In CRaSH upstream that
// is only the 'jmx' command.
return super.getPlugins().filterNot { it is JavaLanguage } + CordaAuthenticationPlugin(rpcOps, userService, nodeLegalName)
return super.getPlugins().filterNot { it is JavaLanguage } + CordaAuthenticationPlugin(rpcOps, securityManager, nodeLegalName)
}
}
val attributes = mapOf(
@ -180,7 +180,7 @@ object InteractiveShell {
context.refresh()
this.config = config
start(context)
return context.getPlugin(ShellFactory::class.java).create(null, CordaSSHAuthInfo(false, makeRPCOpsWithContext(rpcOps, net.corda.core.context.InvocationContext.shell(), RpcPermissions.ALL), StdoutANSIProgressRenderer))
return context.getPlugin(ShellFactory::class.java).create(null, CordaSSHAuthInfo(false, makeRPCOpsWithContext(rpcOps, net.corda.core.context.InvocationContext.shell(), AdminSubject("SHELL_USER")), StdoutANSIProgressRenderer))
}
}

@ -248,7 +248,7 @@ object InteractiveShell {
} catch (e: NoApplicableConstructor) {
output.println("No matching constructor found:", Color.red)
e.errors.forEach { output.println("- $it", Color.red) }
} catch (e:PermissionException) {
} catch (e: PermissionException) {
output.println(e.message ?: "Access denied", Color.red)
} finally {
InputStreamDeserializer.closeAll()
@ -271,9 +271,9 @@ object InteractiveShell {
*/
@Throws(NoApplicableConstructor::class)
fun <T> runFlowFromString(invoke: (Class<out FlowLogic<T>>, Array<out Any?>) -> FlowProgressHandle<T>,
inputData: String,
clazz: Class<out FlowLogic<T>>,
om: ObjectMapper = yamlInputMapper): FlowProgressHandle<T> {
inputData: String,
clazz: Class<out FlowLogic<T>>,
om: ObjectMapper = yamlInputMapper): FlowProgressHandle<T> {
// For each constructor, attempt to parse the input data as a method call. Use the first that succeeds,
// and keep track of the reasons we failed so we can print them out if no constructors are usable.
val parser = StringToMethodCallParser(clazz, om)
@ -1,36 +1,39 @@
package net.corda.node.shell

import net.corda.core.context.InvocationContext
import net.corda.core.messaging.*
import net.corda.core.messaging.CordaRPCOps
import net.corda.core.utilities.getOrThrow
import net.corda.node.internal.security.AuthorizingSubject
import net.corda.node.services.messaging.CURRENT_RPC_CONTEXT
import net.corda.node.services.messaging.RpcAuthContext
import net.corda.node.services.messaging.RpcPermissions
import java.lang.reflect.InvocationTargetException
import java.lang.reflect.Proxy
import java.util.concurrent.CompletableFuture
import java.util.concurrent.Future

fun makeRPCOpsWithContext(cordaRPCOps: CordaRPCOps, invocationContext:InvocationContext, rpcPermissions: RpcPermissions) : CordaRPCOps {
return Proxy.newProxyInstance(CordaRPCOps::class.java.classLoader, arrayOf(CordaRPCOps::class.java), { proxy, method, args ->
RPCContextRunner(invocationContext, rpcPermissions) {
try {
method.invoke(cordaRPCOps, *(args ?: arrayOf()))
} catch (e: InvocationTargetException) {
// Unpack exception.
throw e.targetException
}
}.get().getOrThrow()
}) as CordaRPCOps
fun makeRPCOpsWithContext(cordaRPCOps: CordaRPCOps, invocationContext:InvocationContext, authorizingSubject: AuthorizingSubject) : CordaRPCOps {

return Proxy.newProxyInstance(CordaRPCOps::class.java.classLoader, arrayOf(CordaRPCOps::class.java), { _, method, args ->
RPCContextRunner(invocationContext, authorizingSubject) {
try {
method.invoke(cordaRPCOps, *(args ?: arrayOf()))
} catch (e: InvocationTargetException) {
// Unpack exception.
throw e.targetException
}
}.get().getOrThrow()
}) as CordaRPCOps
}

private class RPCContextRunner<T>(val invocationContext:InvocationContext, val rpcPermissions: RpcPermissions, val block:() -> T) : Thread() {
private class RPCContextRunner<T>(val invocationContext: InvocationContext, val authorizingSubject: AuthorizingSubject, val block:() -> T): Thread() {

private var result: CompletableFuture<T> = CompletableFuture()

override fun run() {
CURRENT_RPC_CONTEXT.set(RpcAuthContext(invocationContext, rpcPermissions))
CURRENT_RPC_CONTEXT.set(RpcAuthContext(invocationContext, authorizingSubject))
try {
result.complete(block())
} catch (e:Throwable) {
} catch (e: Throwable) {
result.completeExceptionally(e)
} finally {
CURRENT_RPC_CONTEXT.remove()
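A hedged sketch of the proxy factory above as the shell code uses it, assuming an existing CordaRPCOps instance called rpcOps; AdminSubject stands in for a fully permissioned user:

val shellOps = makeRPCOpsWithContext(rpcOps, net.corda.core.context.InvocationContext.shell(), AdminSubject("SHELL_USER"))
// every call through shellOps now runs on a worker thread with CURRENT_RPC_CONTEXT populated
val notaries = shellOps.notaryIdentities()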
@ -11,6 +11,7 @@ dataSourceProperties = {
}
database = {
transactionIsolationLevel = "REPEATABLE_READ"
exportHibernateJMXStatistics = "false"
}
devMode = true
useHTTPS = false
@ -2,6 +2,7 @@ package net.corda.node

import co.paralleluniverse.fibers.Suspendable
import net.corda.client.rpc.PermissionException
import net.corda.core.context.AuthServiceId
import net.corda.core.context.InvocationContext
import net.corda.core.contracts.Amount
import net.corda.core.contracts.ContractState
@ -26,11 +27,12 @@ import net.corda.finance.flows.CashIssueFlow
import net.corda.finance.flows.CashPaymentFlow
import net.corda.node.internal.SecureCordaRPCOps
import net.corda.node.internal.StartedNode
import net.corda.node.internal.security.RPCSecurityManagerImpl
import net.corda.node.services.Permissions.Companion.invokeRpc
import net.corda.node.services.Permissions.Companion.startFlow
import net.corda.node.services.messaging.CURRENT_RPC_CONTEXT
import net.corda.node.services.messaging.RpcAuthContext
import net.corda.node.services.messaging.RpcPermissions
import net.corda.nodeapi.internal.config.User
import net.corda.testing.*
import net.corda.testing.node.MockNetwork
import net.corda.testing.node.MockNetwork.MockNode
@ -48,6 +50,15 @@ import kotlin.test.assertFalse
import kotlin.test.assertNull
import kotlin.test.assertTrue

// Mock an AuthorizingSubject instance sticking to a fixed set of permissions
private fun buildSubject(principal: String, permissionStrings: Set<String>) =
RPCSecurityManagerImpl.fromUserList(
id = AuthServiceId("TEST"),
users = listOf(User(username = principal,
password = "",
permissions = permissionStrings)))
.buildSubject(principal)

class CordaRPCOpsImplTest {
private companion object {
val testJar = "net/corda/node/testing/test.jar"
@ -67,7 +78,7 @@ class CordaRPCOpsImplTest {
mockNet = MockNetwork(cordappPackages = listOf("net.corda.finance.contracts.asset"))
aliceNode = mockNet.createNode(MockNodeParameters(legalName = ALICE_NAME))
rpc = SecureCordaRPCOps(aliceNode.services, aliceNode.smm, aliceNode.database, aliceNode.services)
CURRENT_RPC_CONTEXT.set(RpcAuthContext(InvocationContext.rpc(testActor()), RpcPermissions.NONE))
CURRENT_RPC_CONTEXT.set(RpcAuthContext(InvocationContext.rpc(testActor()), buildSubject("TEST_USER", emptySet())))

mockNet.runNetwork()
withPermissions(invokeRpc(CordaRPCOps::notaryIdentities)) {
@ -301,7 +312,8 @@ class CordaRPCOpsImplTest {

val previous = CURRENT_RPC_CONTEXT.get()
try {
CURRENT_RPC_CONTEXT.set(previous.copy(grantedPermissions = RpcPermissions(permissions.toSet())))
CURRENT_RPC_CONTEXT.set(previous.copy(authorizer =
buildSubject(previous.principal, permissions.toSet())))
action.invoke()
} finally {
CURRENT_RPC_CONTEXT.set(previous)
@ -1,11 +1,12 @@
package net.corda.node.services

import net.corda.core.context.AuthServiceId
import net.corda.node.internal.security.RPCSecurityManagerImpl
import net.corda.nodeapi.internal.config.User
import org.assertj.core.api.Assertions.assertThatThrownBy
import org.junit.Test

class RPCUserServiceTest {
class RPCSecurityManagerTest {

@Test
fun `Artemis special characters not permitted in RPC usernames`() {
@ -15,6 +16,6 @@ class RPCUserServiceTest {
}

private fun configWithRPCUsername(username: String) {
RPCUserServiceImpl(listOf(User(username, "password", setOf())))
RPCSecurityManagerImpl.fromUserList(users = listOf(User(username, "password", setOf())), id = AuthServiceId("TEST"))
}
}
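A hedged sketch of a further check in the same style, asserting that bad credentials are rejected by the new manager; the test data is hypothetical and not part of the commit:

@Test
fun `wrong password is rejected`() {
    val manager = RPCSecurityManagerImpl.fromUserList(
            users = listOf(User("user", "password", setOf())),
            id = AuthServiceId("TEST"))
    // authenticate() is documented above to throw FailedLoginException on failure
    assertThatThrownBy { manager.authenticate("user", Password("bogus")) }
            .isInstanceOf(javax.security.auth.login.FailedLoginException::class.java)
}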
@ -1,9 +1,10 @@
package net.corda.node.services.messaging

import net.corda.core.context.AuthServiceId
import net.corda.core.crypto.generateKeyPair
import net.corda.core.utilities.NetworkHostAndPort
import net.corda.node.services.RPCUserService
import net.corda.node.services.RPCUserServiceImpl
import net.corda.node.internal.security.RPCSecurityManager
import net.corda.node.internal.security.RPCSecurityManagerImpl
import net.corda.node.services.config.NodeConfiguration
import net.corda.node.services.config.configureWithDevSSLCertificate
import net.corda.node.services.network.NetworkMapCacheImpl
@ -50,7 +51,7 @@ class ArtemisMessagingTests {

private lateinit var config: NodeConfiguration
private lateinit var database: CordaPersistence
private lateinit var userService: RPCUserService
private lateinit var securityManager: RPCSecurityManager
private var messagingClient: P2PMessagingClient? = null
private var messagingServer: ArtemisMessagingServer? = null

@ -58,7 +59,7 @@ class ArtemisMessagingTests {

@Before
fun setUp() {
userService = RPCUserServiceImpl(emptyList())
securityManager = RPCSecurityManagerImpl.fromUserList(users = emptyList(), id = AuthServiceId("TEST"))
config = testNodeConfiguration(
baseDirectory = temporaryFolder.root.toPath(),
myLegalName = ALICE.name)
@ -169,7 +170,7 @@ class ArtemisMessagingTests {
}

private fun createMessagingServer(local: Int = serverPort, rpc: Int = rpcPort): ArtemisMessagingServer {
return ArtemisMessagingServer(config, local, rpc, networkMapCache, userService).apply {
return ArtemisMessagingServer(config, local, rpc, networkMapCache, securityManager).apply {
config.configureWithDevSSLCertificate()
messagingServer = this
}
@ -868,7 +868,7 @@ class HibernateConfigurationTest {
}

/**
* Test invoking SQL query using JDBC connection (session)
* Test invoking SQL query using DB connection (session)
*/
@Test
fun `test calling an arbitrary JDBC native query`() {
@ -2037,7 +2037,7 @@ class VaultQueryTests {
* USE CASE demonstrations (outside of mainline Corda)
*
* 1) Template / Tutorial CorDapp service using Vault API Custom Query to access attributes of IOU State
* 2) Template / Tutorial Flow using a JDBC session to execute a custom query
* 2) Template / Tutorial Flow using a DB session to execute a custom query
* 3) Template / Tutorial CorDapp service query extension executing Named Queries via JPA
* 4) Advanced pagination queries using Spring Data (and/or Hibernate/JPQL)
*/
@ -4,17 +4,20 @@ import net.corda.core.concurrent.CordaFuture
import net.corda.core.internal.div
import net.corda.core.internal.list
import net.corda.core.internal.readLines
import net.corda.core.utilities.NetworkHostAndPort
import net.corda.core.utilities.getOrThrow
import net.corda.node.internal.NodeStartup
import net.corda.testing.DUMMY_BANK_A
import net.corda.testing.DUMMY_NOTARY
import net.corda.testing.DUMMY_REGULATOR
import net.corda.testing.common.internal.ProjectStructure.projectRootDir
import net.corda.testing.http.HttpApi
import net.corda.testing.internal.addressMustBeBound
import net.corda.testing.internal.addressMustNotBeBound
import net.corda.testing.internal.internalDriver
import net.corda.testing.node.NotarySpec
import org.assertj.core.api.Assertions.assertThat
import org.json.simple.JSONObject
import org.junit.Test
import java.util.concurrent.Executors
import java.util.concurrent.ScheduledExecutorService
@ -67,6 +70,20 @@ class DriverTests {
}
}

@Test
fun `monitoring mode enables jolokia exporting of JMX metrics via HTTP JSON`() {
driver(jmxPolicy = JmxPolicy(true)) {
// start another node so we gain access to node JMX metrics
startNode(providedName = DUMMY_REGULATOR.name).getOrThrow()

val webAddress = NetworkHostAndPort("localhost", 7006)
// request access to some JMX metrics via Jolokia HTTP/JSON
val api = HttpApi.fromHostAndPort(webAddress, "/jolokia/")
val versionAsJson = api.getJson<JSONObject>("/jolokia/version/")
assertThat(versionAsJson.getValue("status")).isEqualTo(200)
}
}

@Test
fun `started node, which is not waited for in the driver, is shutdown when the driver exits`() {
// First check that the process-id file is created by the node on startup, so that we can be sure our check that
@ -127,6 +127,10 @@ data class NodeParameters(
fun setMaximumHeapSize(maximumHeapSize: String) = copy(maximumHeapSize = maximumHeapSize)
}

data class JmxPolicy(val startJmxHttpServer: Boolean = false,
val jmxHttpServerPortAllocation: PortAllocation? =
if (startJmxHttpServer) PortAllocation.Incremental(7005) else null)

/**
* [driver] allows one to start up nodes like this:
* driver {
@ -154,6 +158,9 @@ data class NodeParameters(
* not. Note that this may be overridden in [DriverDSL.startNode].
* @param notarySpecs The notaries advertised for this network. These nodes will be started automatically and will be
* available from [DriverDSL.notaryHandles]. Defaults to a simple validating notary.
* @param jmxPolicy Used to specify whether to expose JMX metrics via Jolokia HTTP/JSON. Defines two attributes:
* startJmxHttpServer: indicates whether the spawned nodes should start with a Jolokia JMX agent to enable remote JMX monitoring using HTTP/JSON.
* jmxHttpServerPortAllocation: the port allocation strategy to use for remote Jolokia/JMX monitoring over HTTP. Defaults to incremental.
* @param dsl The dsl itself.
* @return The value returned in the [dsl] closure.
*/
@ -170,6 +177,7 @@ fun <A> driver(
waitForAllNodesToFinish: Boolean = defaultParameters.waitForAllNodesToFinish,
notarySpecs: List<NotarySpec> = defaultParameters.notarySpecs,
extraCordappPackagesToScan: List<String> = defaultParameters.extraCordappPackagesToScan,
jmxPolicy: JmxPolicy = JmxPolicy(),
dsl: DriverDSL.() -> A
): A {
return genericDriver(
@ -184,6 +192,7 @@ fun <A> driver(
waitForNodesToFinish = waitForAllNodesToFinish,
notarySpecs = notarySpecs,
extraCordappPackagesToScan = extraCordappPackagesToScan,
jmxPolicy = jmxPolicy,
compatibilityZone = null
),
coerce = { it },
@ -219,7 +228,9 @@ data class DriverParameters(
val startNodesInProcess: Boolean = false,
val waitForAllNodesToFinish: Boolean = false,
val notarySpecs: List<NotarySpec> = listOf(NotarySpec(DUMMY_NOTARY.name)),
val extraCordappPackagesToScan: List<String> = emptyList()
val extraCordappPackagesToScan: List<String> = emptyList(),
val jmxPolicy: JmxPolicy = JmxPolicy()

) {
fun setIsDebug(isDebug: Boolean) = copy(isDebug = isDebug)
fun setDriverDirectory(driverDirectory: Path) = copy(driverDirectory = driverDirectory)
@ -232,4 +243,5 @@ data class DriverParameters(
fun setWaitForAllNodesToFinish(waitForAllNodesToFinish: Boolean) = copy(waitForAllNodesToFinish = waitForAllNodesToFinish)
fun setNotarySpecs(notarySpecs: List<NotarySpec>) = copy(notarySpecs = notarySpecs)
fun setExtraCordappPackagesToScan(extraCordappPackagesToScan: List<String>) = copy(extraCordappPackagesToScan = extraCordappPackagesToScan)
fun setJmxPolicy(jmxPolicy: JmxPolicy) = copy(jmxPolicy = jmxPolicy)
}
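A hedged sketch of switching the new monitoring support on from a driver test, with an explicit port range instead of the 7005-based default; the node name reuses a test identity seen elsewhere in this change:

driver(jmxPolicy = JmxPolicy(startJmxHttpServer = true,
        jmxHttpServerPortAllocation = PortAllocation.Incremental(9000))) {
    startNode(providedName = DUMMY_BANK_A.name).getOrThrow()
    // each spawned node now carries a Jolokia agent reachable over HTTP/JSON
}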
@ -83,6 +83,7 @@ class DriverDSLImpl(
|
||||
val startNodesInProcess: Boolean,
|
||||
val waitForNodesToFinish: Boolean,
|
||||
extraCordappPackagesToScan: List<String>,
|
||||
val jmxPolicy: JmxPolicy,
|
||||
val notarySpecs: List<NotarySpec>,
|
||||
val compatibilityZone: CompatibilityZoneParams?
|
||||
) : InternalDriverDSL {
|
||||
@ -109,11 +110,25 @@ class DriverDSLImpl(
|
||||
|
||||
//TODO: remove this once we can bundle quasar properly.
|
||||
private val quasarJarPath: String by lazy {
|
||||
val cl = ClassLoader.getSystemClassLoader()
|
||||
val urls = (cl as URLClassLoader).urLs
|
||||
val quasarPattern = ".*quasar.*\\.jar$".toRegex()
|
||||
val quasarFileUrl = urls.first { quasarPattern.matches(it.path) }
|
||||
Paths.get(quasarFileUrl.toURI()).toString()
|
||||
resolveJar(".*quasar.*\\.jar$")
|
||||
}
|
||||
|
||||
private val jolokiaJarPath: String by lazy {
|
||||
resolveJar(".*jolokia-jvm-.*-agent\\.jar$")
|
||||
}
|
||||
|
||||
private fun resolveJar(jarNamePattern: String): String {
|
||||
return try {
|
||||
val cl = ClassLoader.getSystemClassLoader()
|
||||
val urls = (cl as URLClassLoader).urLs
|
||||
val jarPattern = jarNamePattern.toRegex()
|
||||
val jarFileUrl = urls.first { jarPattern.matches(it.path) }
|
||||
Paths.get(jarFileUrl.toURI()).toString()
|
||||
}
|
||||
catch(e: Exception) {
|
||||
log.warn("Unable to locate JAR `$jarNamePattern` on classpath: ${e.message}", e)
|
||||
throw e
|
||||
}
|
||||
}
|
||||
|
||||
override fun shutdown() {

@ -522,8 +537,7 @@ class DriverDSLImpl(
}
} else {
val debugPort = if (isDebug) debugPortAllocation.nextPort() else null
val process = startOutOfProcessNode(configuration, config, quasarJarPath, debugPort,
systemProperties, cordappPackages, maximumHeapSize, initialRegistration = false)
val monitorPort = if (jmxPolicy.startJmxHttpServer) jmxPolicy.jmxHttpServerPortAllocation?.nextPort() else null
val process = startOutOfProcessNode(configuration, config, quasarJarPath, debugPort, jolokiaJarPath, monitorPort,
systemProperties, cordappPackages, maximumHeapSize, initialRegistration = false)
if (waitForNodesToFinish) {
state.locked {
processes += process
@ -616,12 +630,14 @@ class DriverDSLImpl(
config: Config,
quasarJarPath: String,
debugPort: Int?,
jolokiaJarPath: String,
monitorPort: Int?,
overriddenSystemProperties: Map<String, String>,
cordappPackages: List<String>,
maximumHeapSize: String,
initialRegistration: Boolean
): Process {
log.info("Starting out-of-process Node ${nodeConf.myLegalName.organisation}, debug port is " + (debugPort ?: "not enabled"))
log.info("Starting out-of-process Node ${nodeConf.myLegalName.organisation}, debug port is " + (debugPort ?: "not enabled") + ", jolokia monitoring port is " + (monitorPort ?: "not enabled"))
// Write node.conf
writeConfig(nodeConf.baseDirectory, "node.conf", config)

@ -648,6 +664,7 @@ class DriverDSLImpl(
"org.objenesis**;org.slf4j**;org.w3c**;org.xml**;org.yaml**;reflectasm**;rx**)"
val extraJvmArguments = systemProperties.removeResolvedClasspath().map { "-D${it.key}=${it.value}" } +
"-javaagent:$quasarJarPath=$excludePattern"
val jolokiaAgent = monitorPort?.let { "-javaagent:$jolokiaJarPath=port=$monitorPort,host=localhost" }
val loggingLevel = if (debugPort == null) "INFO" else "DEBUG"

val arguments = mutableListOf(
@ -663,7 +680,7 @@ class DriverDSLImpl(
className = "net.corda.node.Corda", // cannot directly get class for this, so just use string
arguments = arguments,
jdwpPort = debugPort,
extraJvmArguments = extraJvmArguments,
extraJvmArguments = extraJvmArguments + listOfNotNull(jolokiaAgent),
errorLogPath = nodeConf.baseDirectory / NodeStartup.LOGS_DIRECTORY_NAME / "error.log",
workingDirectory = nodeConf.baseDirectory,
maximumHeapSize = maximumHeapSize
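The hunk above is where those JAR paths become JVM arguments: the Quasar agent is always attached, while the Jolokia agent is only added when a monitoring port was allocated. A minimal sketch of that assembly, with quasarJar, excludePattern, jolokiaJar and monitorPort standing in for values the driver computes itself:

// Illustrative only: the parameters stand in for values resolved by the driver at runtime.
fun agentArguments(quasarJar: String, excludePattern: String, jolokiaJar: String, monitorPort: Int?): List<String> {
    val quasarAgent = "-javaagent:$quasarJar=$excludePattern"
    // Jolokia is optional: listOfNotNull drops it when no monitoring port was allocated.
    val jolokiaAgent = monitorPort?.let { "-javaagent:$jolokiaJar=port=$it,host=localhost" }
    return listOf(quasarAgent) + listOfNotNull(jolokiaAgent)
}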

@ -796,6 +813,7 @@ fun <DI : DriverDSL, D : InternalDriverDSL, A> genericDriver(
startNodesInProcess: Boolean = defaultParameters.startNodesInProcess,
notarySpecs: List<NotarySpec>,
extraCordappPackagesToScan: List<String> = defaultParameters.extraCordappPackagesToScan,
jmxPolicy: JmxPolicy = JmxPolicy(),
driverDslWrapper: (DriverDSLImpl) -> D,
coerce: (D) -> DI, dsl: DI.() -> A
): A {
@ -811,6 +829,7 @@ fun <DI : DriverDSL, D : InternalDriverDSL, A> genericDriver(
startNodesInProcess = startNodesInProcess,
waitForNodesToFinish = waitForNodesToFinish,
extraCordappPackagesToScan = extraCordappPackagesToScan,
jmxPolicy = jmxPolicy,
notarySpecs = notarySpecs,
compatibilityZone = null
)
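genericDriver now accepts and forwards a JmxPolicy, so any driver entry point can opt in to Jolokia monitoring for out-of-process nodes. A sketch of how a test might request it; the constructor parameter names are assumed to mirror the startJmxHttpServer and jmxHttpServerPortAllocation properties read earlier in this diff, and the empty driver body is a placeholder.

import net.corda.testing.driver.JmxPolicy
import net.corda.testing.driver.PortAllocation
import net.corda.testing.driver.driver

fun main(args: Array<String>) {
    // Assumed named parameters; JmxPolicy(true) is the positional form used elsewhere in this commit.
    val monitoring = JmxPolicy(startJmxHttpServer = true, jmxHttpServerPortAllocation = PortAllocation.Incremental(7005))
    driver(jmxPolicy = monitoring) {
        // every out-of-process node started here gets a Jolokia agent on an allocated monitor port
    }
}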

@ -16,7 +16,7 @@ import net.corda.core.internal.div
import net.corda.core.internal.uncheckedCast
import net.corda.core.messaging.RPCOps
import net.corda.core.utilities.NetworkHostAndPort
import net.corda.node.services.RPCUserService
import net.corda.node.internal.security.RPCSecurityManagerImpl
import net.corda.node.services.messaging.ArtemisMessagingServer
import net.corda.node.services.messaging.RPCServer
import net.corda.node.services.messaging.RPCServerConfiguration
@ -89,6 +89,7 @@ val fakeNodeLegalName = CordaX500Name(organisation = "Not:a:real:name", locality
// Use a global pool so that we can run RPC tests in parallel
private val globalPortAllocation = PortAllocation.Incremental(10000)
private val globalDebugPortAllocation = PortAllocation.Incremental(5005)
private val globalMonitorPortAllocation = PortAllocation.Incremental(7005)

fun <A> rpcDriver(
isDebug: Boolean = false,
@ -102,28 +103,31 @@ fun <A> rpcDriver(
extraCordappPackagesToScan: List<String> = emptyList(),
notarySpecs: List<NotarySpec> = emptyList(),
externalTrace: Trace? = null,
jmxPolicy: JmxPolicy = JmxPolicy(),
dsl: RPCDriverDSL.() -> A
) : A {
return genericDriver(
driverDsl = RPCDriverDSL(
DriverDSLImpl(
portAllocation = portAllocation,
debugPortAllocation = debugPortAllocation,
systemProperties = systemProperties,
driverDirectory = driverDirectory.toAbsolutePath(),
useTestClock = useTestClock,
isDebug = isDebug,
startNodesInProcess = startNodesInProcess,
waitForNodesToFinish = waitForNodesToFinish,
extraCordappPackagesToScan = extraCordappPackagesToScan,
notarySpecs = notarySpecs,
compatibilityZone = null
), externalTrace
),
coerce = { it },
dsl = dsl,
initialiseSerialization = false
)}
driverDsl = RPCDriverDSL(
DriverDSLImpl(
portAllocation = portAllocation,
debugPortAllocation = debugPortAllocation,
systemProperties = systemProperties,
driverDirectory = driverDirectory.toAbsolutePath(),
useTestClock = useTestClock,
isDebug = isDebug,
startNodesInProcess = startNodesInProcess,
waitForNodesToFinish = waitForNodesToFinish,
extraCordappPackagesToScan = extraCordappPackagesToScan,
notarySpecs = notarySpecs,
jmxPolicy = jmxPolicy,
compatibilityZone = null
), externalTrace
),
coerce = { it },
dsl = dsl,
initialiseSerialization = false
)
}

private class SingleUserSecurityManager(val rpcUser: User) : ActiveMQSecurityManager3 {
override fun validateUser(user: String?, password: String?) = isValid(user, password)
@ -428,17 +432,13 @@ data class RPCDriverDSL(
minLargeMessageSize = ArtemisMessagingServer.MAX_FILE_SIZE
isUseGlobalPools = false
}
val userService = object : RPCUserService {
override fun getUser(username: String): User? = if (username == rpcUser.username) rpcUser else null
override val users: List<User> get() = listOf(rpcUser)
override val id: AuthServiceId = AuthServiceId("RPC_DRIVER")
}
val rpcSecurityManager = RPCSecurityManagerImpl.fromUserList(users = listOf(rpcUser), id = AuthServiceId("TEST_SECURITY_MANAGER"))
val rpcServer = RPCServer(
ops,
rpcUser.username,
rpcUser.password,
locator,
userService,
rpcSecurityManager,
nodeLegalName,
configuration
)
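In the RPC driver the hand-rolled RPCUserService is replaced by RPCSecurityManagerImpl.fromUserList. A minimal sketch of the same construction for a single test user; the credentials and the "ALL" permission are made-up values, and the import locations for AuthServiceId and User are assumptions based on the imports visible in this commit.

import net.corda.core.context.AuthServiceId          // assumed package
import net.corda.node.internal.security.RPCSecurityManagerImpl
import net.corda.nodeapi.internal.config.User

// Sketch only: build a security manager for one in-memory test user.
fun testSecurityManager() =
        RPCSecurityManagerImpl.fromUserList(
                users = listOf(User("testUser", "testPassword", permissions = setOf("ALL"))),
                id = AuthServiceId("TEST_SECURITY_MANAGER"))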

@ -5,7 +5,7 @@ package net.corda.testing.internal.demorun
import net.corda.cordform.CordformDefinition
import net.corda.cordform.CordformNode
import net.corda.core.utilities.NetworkHostAndPort
import net.corda.core.utilities.getOrThrow
import net.corda.testing.driver.JmxPolicy
import net.corda.testing.driver.PortAllocation
import net.corda.testing.internal.internalDriver

@ -39,6 +39,7 @@ private fun CordformDefinition.runNodes(waitForAllNodesToFinish: Boolean, block:
.max()!!
internalDriver(
isDebug = true,
jmxPolicy = JmxPolicy(true),
driverDirectory = nodesDirectory,
extraCordappPackagesToScan = cordappPackages,
// Notaries are manually specified in Cordform so we don't want the driver automatically starting any

@ -0,0 +1,96 @@
package net.corda.node.internal.security

import org.hamcrest.CoreMatchers.containsString
import org.hamcrest.MatcherAssert.assertThat
import org.hamcrest.core.IsEqual.equalTo
import org.hamcrest.core.IsNot.not
import org.junit.Test

internal class PasswordTest {

@Test
fun immutability() {

val charArray = "dadada".toCharArray()
val password = Password(charArray)
assertThat(password.value, equalTo(charArray))

charArray[0] = 'm'
assertThat(password.value, not(equalTo(charArray)))

val value = password.value
value[1] = 'e'
assertThat(password.value, not(equalTo(value)))
}

@Test
fun constructor_and_getters() {

val value = "dadada"

assertThat(Password(value.toCharArray()).value, equalTo(value.toCharArray()))
assertThat(Password(value.toCharArray()).valueAsString, equalTo(value))

assertThat(Password(value).value, equalTo(value.toCharArray()))
assertThat(Password(value).valueAsString, equalTo(value))
}

@Test
fun equals() {

val passwordValue1 = Password("value1")
val passwordValue2 = Password("value2")
val passwordValue12 = Password("value1")

assertThat(passwordValue1, equalTo(passwordValue1))

assertThat(passwordValue1, not(equalTo(passwordValue2)))
assertThat(passwordValue2, not(equalTo(passwordValue1)))

assertThat(passwordValue1, equalTo(passwordValue12))
assertThat(passwordValue12, equalTo(passwordValue1))
}

@Test
fun hashcode() {

val passwordValue1 = Password("value1")
val passwordValue2 = Password("value2")
val passwordValue12 = Password("value1")

assertThat(passwordValue1.hashCode(), equalTo(passwordValue1.hashCode()))

// not strictly required by hashCode() contract, but desirable
assertThat(passwordValue1.hashCode(), not(equalTo(passwordValue2.hashCode())))
assertThat(passwordValue2.hashCode(), not(equalTo(passwordValue1.hashCode())))

assertThat(passwordValue1.hashCode(), equalTo(passwordValue12.hashCode()))
assertThat(passwordValue12.hashCode(), equalTo(passwordValue1.hashCode()))
}

@Test
fun close() {

val value = "ipjd1@pijmps112112"
val password = Password(value)

password.use {
val readValue = it.valueAsString
assertThat(readValue, equalTo(value))
}

val readValue = password.valueAsString
assertThat(readValue, not(equalTo(value)))
}

@Test
fun toString_is_masked() {

val value = "ipjd1@pijmps112112"
val password = Password(value)

val toString = password.toString()

assertThat(toString, not(containsString(value)))
}
}
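PasswordTest pins down the behaviour expected of the new Password type: defensive copies of the backing char array, value-based equals and hashCode, a masked toString(), and a close() that wipes the stored characters so the value cannot be read afterwards. The class itself is not part of this diff; the sketch below is one way to satisfy these tests, not the implementation that ships.

// Illustrative sketch only, written to satisfy PasswordTest above.
class Password(chars: CharArray) : AutoCloseable {
    constructor(str: String) : this(str.toCharArray())

    private val store = chars.copyOf()              // defensive copy in

    val value: CharArray get() = store.copyOf()     // defensive copy out
    val valueAsString: String get() = String(store)

    override fun equals(other: Any?) = other is Password && store.contentEquals(other.store)
    override fun hashCode() = store.contentHashCode()
    override fun toString() = "*".repeat(store.size) // never leak the secret
    override fun close() { store.fill(' ') }         // wipe the backing array
}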

@ -24,6 +24,7 @@ import net.corda.node.services.Permissions.Companion.startFlow
import net.corda.nodeapi.internal.config.User
import net.corda.testing.ALICE
import net.corda.testing.BOB
import net.corda.testing.driver.JmxPolicy
import net.corda.testing.driver.NodeHandle
import net.corda.testing.driver.PortAllocation
import net.corda.testing.driver.driver
@ -64,14 +65,14 @@ class ExplorerSimulation(private val options: OptionSet) {

private fun startDemoNodes() {
val portAllocation = PortAllocation.Incremental(20000)
driver(portAllocation = portAllocation, extraCordappPackagesToScan = listOf("net.corda.finance"), waitForAllNodesToFinish = true) {
driver(portAllocation = portAllocation, extraCordappPackagesToScan = listOf("net.corda.finance"), waitForAllNodesToFinish = true, jmxPolicy = JmxPolicy(true)) {
// TODO : Supported flow should be exposed somehow from the node instead of set of ServiceInfo.
val alice = startNode(providedName = ALICE.name, rpcUsers = listOf(user))
val alice = startNode(providedName = ALICE.name, rpcUsers = listOf(user), customOverrides = mapOf("devMode" to "true"))
val bob = startNode(providedName = BOB.name, rpcUsers = listOf(user))
val ukBankName = CordaX500Name(organisation = "UK Bank Plc", locality = "London", country = "GB")
val usaBankName = CordaX500Name(organisation = "USA Bank Corp", locality = "New York", country = "US")
val issuerGBP = startNode(providedName = ukBankName, rpcUsers = listOf(manager),
customOverrides = mapOf("issuableCurrencies" to listOf("GBP")))
customOverrides = mapOf("issuableCurrencies" to listOf("GBP"), "" to "true"))
val issuerUSD = startNode(providedName = usaBankName, rpcUsers = listOf(manager),
customOverrides = mapOf("issuableCurrencies" to listOf("USD")))
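The explorer simulation now passes per-node configuration overrides through startNode (for example forcing devMode on for the demo nodes). A trimmed-down sketch of the same pattern; the X.500 name and RPC user are placeholders, and the driver defaults are assumed to be acceptable for the remaining parameters.

import net.corda.core.identity.CordaX500Name
import net.corda.core.utilities.getOrThrow
import net.corda.nodeapi.internal.config.User
import net.corda.testing.driver.driver

fun main(args: Array<String>) {
    driver {
        val demoUser = User("demo", "demo", permissions = setOf("ALL"))
        // startNode returns a future; getOrThrow blocks until the node handle is available
        startNode(
                providedName = CordaX500Name(organisation = "Demo Bank", locality = "London", country = "GB"),
                rpcUsers = listOf(demoUser),
                customOverrides = mapOf("devMode" to "true")).getOrThrow()
    }
}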

@ -22,6 +22,8 @@ import net.corda.nodeapi.VerifierApi
import net.corda.nodeapi.internal.ArtemisMessagingComponent.Companion.NODE_USER
import net.corda.nodeapi.internal.config.NodeSSLConfiguration
import net.corda.nodeapi.internal.config.SSLConfiguration
import net.corda.nodeapi.internal.ArtemisMessagingComponent.Companion.NODE_USER
import net.corda.testing.driver.JmxPolicy
import net.corda.testing.driver.NodeHandle
import net.corda.testing.driver.PortAllocation
import net.corda.testing.driver.driver
@ -59,6 +61,7 @@ fun <A> verifierDriver(
waitForNodesToFinish: Boolean = false,
extraCordappPackagesToScan: List<String> = emptyList(),
notarySpecs: List<NotarySpec> = emptyList(),
jmxPolicy: JmxPolicy = JmxPolicy(),
dsl: VerifierDriverDSL.() -> A
) = genericDriver(
driverDsl = VerifierDriverDSL(
@ -73,6 +76,7 @@ fun <A> verifierDriver(
waitForNodesToFinish = waitForNodesToFinish,
extraCordappPackagesToScan = extraCordappPackagesToScan,
notarySpecs = notarySpecs,
jmxPolicy = jmxPolicy,
compatibilityZone = null
)
),

@ -38,7 +38,7 @@ dependencies {
compile "org.eclipse.jetty:jetty-servlet:$jetty_version"
compile "org.eclipse.jetty:jetty-webapp:$jetty_version"
compile "javax.servlet:javax.servlet-api:3.1.0"
compile "org.jolokia:jolokia-agent-war:$jolokia_version"
compile "org.jolokia:jolokia-war:$jolokia_version"
compile "commons-fileupload:commons-fileupload:$fileupload_version"

// Log4J: logging framework (with SLF4J bindings)

@ -58,7 +58,7 @@ class NodeWebServer(val config: WebServerConfig) {
// Export JMX monitoring statistics and data over REST/JSON.
if (config.exportJMXto.split(',').contains("http")) {
val classpath = System.getProperty("java.class.path").split(System.getProperty("path.separator"))
val warpath = classpath.firstOrNull { it.contains("jolokia-agent-war-2") && it.endsWith(".war") }
val warpath = classpath.firstOrNull { it.contains("jolokia-war") && it.endsWith(".war") }
if (warpath != null) {
handlerCollection.addHandler(WebAppContext().apply {
// Find the jolokia WAR file on the classpath.
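With the move back to Jolokia 1.3.7 the web server ships the jolokia-war artifact and looks for it by name on the classpath instead of jolokia-agent-war-2. The scan reduces to the sketch below; the function name and log messages are illustrative only.

// Minimal sketch of the WAR lookup performed above.
fun findJolokiaWar(): String? {
    val classpath = System.getProperty("java.class.path").split(System.getProperty("path.separator"))
    return classpath.firstOrNull { it.contains("jolokia-war") && it.endsWith(".war") }
}

fun main(args: Array<String>) {
    val warPath = findJolokiaWar()
    if (warPath != null) println("Mounting Jolokia WAR from $warPath")
    else println("jolokia-war not found on the classpath; JMX-over-HTTP export disabled")
}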