ENT-2414 Named caches (#3848)

* Add named caches and apply to NonInvalidatingUnboundCache and all usages.

* Add named caches and apply to NonInvalidatingCache and all usages.

* Add named caches and apply to NonInvalidatingWeightBasedCache and all usages.

* Move NamedCache to core/internal

* Remove types `NamedCache` and `NamedLoadingCache`

* Suppressed 'name not used' warning, added comment, and fixed generic parameters on the buildNamed functions.

* Use `buildNamed` for all Caffeine caches in production code; caches created in test code are left unchanged.

* Add checks for the cache name

* Formatting

* Minor code review revisions
Christian Sailer 2018-08-24 17:17:22 +01:00 committed by GitHub
parent 042b91814a
commit bc330bd989
29 changed files with 130 additions and 31 deletions
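To illustrate the overall pattern in one place (a minimal sketch, not taken from the diff below; the cache name, key and value types are hypothetical), a typical production call site moves from an anonymous Caffeine cache to a named one:

    import com.github.benmanes.caffeine.cache.Caffeine
    import net.corda.core.internal.buildNamed

    // Before: an anonymous cache built straight from the Caffeine builder.
    // val lookups = Caffeine.newBuilder().maximumSize(1024).build<String, Int>()

    // After: the same cache, but every production cache now carries a name
    // ("ExampleService_lookups" is made up for this example) that buildNamed validates.
    val lookups = Caffeine.newBuilder()
            .maximumSize(1024)
            .buildNamed<String, Int>("ExampleService_lookups")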

.idea/compiler.xml (generated)

@@ -24,6 +24,8 @@
     <module name="buildSrc_test" target="1.8" />
     <module name="canonicalizer_main" target="1.8" />
     <module name="canonicalizer_test" target="1.8" />
+    <module name="cli_main" target="1.8" />
+    <module name="cli_test" target="1.8" />
     <module name="client_main" target="1.8" />
     <module name="client_test" target="1.8" />
     <module name="cliutils_main" target="1.8" />


@@ -6,6 +6,7 @@ import javafx.collections.FXCollections
 import javafx.collections.ObservableList
 import net.corda.client.jfx.utils.*
 import net.corda.core.identity.AnonymousParty
+import net.corda.core.internal.buildNamed
 import net.corda.core.node.NodeInfo
 import net.corda.core.node.services.NetworkMapCache.MapChange
 import java.security.PublicKey
@@ -30,7 +31,7 @@ class NetworkIdentityModel {
     private val rpcProxy by observableValue(NodeMonitorModel::proxyObservable)
     private val identityCache = Caffeine.newBuilder()
-            .build<PublicKey, ObservableValue<NodeInfo?>>({ publicKey ->
+            .buildNamed<PublicKey, ObservableValue<NodeInfo?>>("NetworkIdentityModel_identity", { publicKey ->
                 publicKey.let { rpcProxy.map { it?.cordaRPCOps?.nodeInfoFromParty(AnonymousParty(publicKey)) } }
             })
     val notaries = ChosenList(rpcProxy.map { FXCollections.observableList(it?.cordaRPCOps?.notaryIdentities() ?: emptyList()) }, "notaries")


@@ -14,10 +14,7 @@ import net.corda.client.rpc.internal.serialization.amqp.RpcClientObservableDeSer
 import net.corda.core.context.Actor
 import net.corda.core.context.Trace
 import net.corda.core.context.Trace.InvocationId
-import net.corda.core.internal.LazyStickyPool
-import net.corda.core.internal.LifeCycle
-import net.corda.core.internal.ThreadBox
-import net.corda.core.internal.times
+import net.corda.core.internal.*
 import net.corda.core.messaging.RPCOps
 import net.corda.core.serialization.SerializationContext
 import net.corda.core.serialization.serialize
@@ -162,9 +159,7 @@ class RPCClientProxyHandler(
             observablesToReap.locked { observables.add(observableId) }
         }
         return Caffeine.newBuilder().
-                weakValues().
-                removalListener(onObservableRemove).executor(SameThreadExecutor.getExecutor()).
-                build()
+                weakValues().removalListener(onObservableRemove).executor(SameThreadExecutor.getExecutor()).buildNamed("RpcClientProxyHandler_rpcObservable")
     }

     private var sessionFactory: ClientSessionFactory? = null


@@ -84,6 +84,9 @@ dependencies {
     // Guava: Google utilities library.
     testCompile "com.google.guava:guava:$guava_version"
+    // For caches rather than guava
+    compile "com.github.ben-manes.caffeine:caffeine:$caffeine_version"
     // Smoke tests do NOT have any Node code on the classpath!
     smokeTestCompile project(':smoke-test-utils')
     smokeTestCompile "org.assertj:assertj-core:${assertj_version}"


@@ -0,0 +1,41 @@
+package net.corda.core.internal
+
+import com.github.benmanes.caffeine.cache.Cache
+import com.github.benmanes.caffeine.cache.CacheLoader
+import com.github.benmanes.caffeine.cache.Caffeine
+import com.github.benmanes.caffeine.cache.LoadingCache
+
+/**
+ * Restrict the allowed characters of a cache name - this ensures that each cache has a name, and that
+ * the name can be used to create a file name or a metric name.
+ */
+internal fun checkCacheName(name: String) {
+    require(!name.isBlank())
+    require(allowedChars.matches(name))
+}
+
+private val allowedChars = Regex("^[0-9A-Za-z_.]*\$")
+
+/* buildNamed is the central helper method to build Caffeine caches in Corda.
+ * This makes it easy to add tweaks to all caches built in Corda, and also forces
+ * cache users to give their cache a (meaningful) name that can be used e.g. for
+ * capturing cache traces etc.
+ *
+ * Currently the name is not used in this version of Corda, but there are plans to do so.
+ */
+fun <K, V> Caffeine<in K, in V>.buildNamed(name: String): Cache<K, V> {
+    checkCacheName(name)
+    return this.build<K, V>()
+}
+
+fun <K, V> Caffeine<in K, in V>.buildNamed(name: String, loadFunc: (K) -> V): LoadingCache<K, V> {
+    checkCacheName(name)
+    return this.build<K, V>(loadFunc)
+}
+
+fun <K, V> Caffeine<in K, in V>.buildNamed(name: String, loader: CacheLoader<K, V>): LoadingCache<K, V> {
+    checkCacheName(name)
+    return this.build<K, V>(loader)
+}
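As a usage sketch (the key/value types and the `Demo_*` names here are hypothetical, chosen only to exercise the three overloads above):

    import com.github.benmanes.caffeine.cache.CacheLoader
    import com.github.benmanes.caffeine.cache.Caffeine
    import net.corda.core.internal.buildNamed

    fun demo() {
        // Plain named cache: entries are put in explicitly.
        val plain = Caffeine.newBuilder().maximumSize(100).buildNamed<String, Int>("Demo_plain")
        plain.put("answer", 42)

        // Named loading cache built from a lambda: values are computed on first access.
        val byLambda = Caffeine.newBuilder().buildNamed<String, Int>("Demo_byLambda", { key -> key.length })
        check(byLambda.get("abc") == 3)

        // Named loading cache built from an explicit CacheLoader.
        val byLoader = Caffeine.newBuilder().buildNamed("Demo_byLoader", CacheLoader<String, Int> { key -> key.length })
        check(byLoader.get("abc") == 3)

        // A blank name, or one containing characters outside [0-9A-Za-z_.],
        // fails checkCacheName with an IllegalArgumentException.
    }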


@@ -0,0 +1,24 @@
+package net.corda.core.internal
+
+import org.junit.Test
+import kotlin.test.assertEquals
+
+class NamedCacheTest {
+    fun checkNameHelper(name: String, throws: Boolean) {
+        var exceptionThrown = false
+        try {
+            checkCacheName(name)
+        } catch (e: Exception) {
+            exceptionThrown = true
+        }
+        assertEquals(throws, exceptionThrown)
+    }
+
+    @Test
+    fun TestCheckCacheName() {
+        checkNameHelper("abc_123.234", false)
+        checkNameHelper("", true)
+        checkNameHelper("abc 123", true)
+        checkNameHelper("abc/323", true)
+    }
+}


@@ -2,6 +2,7 @@ package net.corda.nodeapi.internal
 import com.github.benmanes.caffeine.cache.CacheLoader
 import com.github.benmanes.caffeine.cache.Caffeine
+import net.corda.core.internal.buildNamed
 import java.time.Duration
 import java.util.concurrent.TimeUnit
 import java.util.concurrent.atomic.AtomicLong
@@ -9,11 +10,11 @@ import java.util.concurrent.atomic.AtomicLong
 /**
  * A class allowing the deduplication of a strictly incrementing sequence number.
  */
-class DeduplicationChecker(cacheExpiry: Duration) {
+class DeduplicationChecker(cacheExpiry: Duration, name: String = "DeduplicationChecker") {
     // dedupe identity -> watermark cache
     private val watermarkCache = Caffeine.newBuilder()
             .expireAfterAccess(cacheExpiry.toNanos(), TimeUnit.NANOSECONDS)
-            .build(WatermarkCacheLoader)
+            .buildNamed("${name}_watermark", WatermarkCacheLoader)

     private object WatermarkCacheLoader : CacheLoader<Any, AtomicLong> {
         override fun load(key: Any) = AtomicLong(-1)


@@ -1,6 +1,7 @@
 package net.corda.nodeapi.internal.persistence

 import com.github.benmanes.caffeine.cache.Caffeine
+import net.corda.core.internal.buildNamed
 import net.corda.core.internal.castIfPossible
 import net.corda.core.schemas.MappedSchema
 import net.corda.core.utilities.contextLogger
@@ -57,7 +58,7 @@ class HibernateConfiguration(
         }
     }
-    private val sessionFactories = Caffeine.newBuilder().maximumSize(databaseConfig.mappedSchemaCacheSize).build<Set<MappedSchema>, SessionFactory>()
+    private val sessionFactories = Caffeine.newBuilder().maximumSize(databaseConfig.mappedSchemaCacheSize).buildNamed<Set<MappedSchema>, SessionFactory>("HibernateConfiguration_sessionFactories")
     val sessionFactoryForRegisteredSchemas = schemas.let {
         logger.info("Init HibernateConfiguration for schemas: $it")


@@ -5,6 +5,7 @@ import com.github.benmanes.caffeine.cache.Cache
 import com.github.benmanes.caffeine.cache.Caffeine
 import com.google.common.primitives.Ints
 import net.corda.core.context.AuthServiceId
+import net.corda.core.internal.buildNamed
 import net.corda.core.internal.uncheckedCast
 import net.corda.core.utilities.loggerFor
 import net.corda.node.internal.DataSourceFactory
@@ -308,7 +309,7 @@ private class CaffeineCacheManager(val maxSize: Long,
         return Caffeine.newBuilder()
                 .expireAfterWrite(timeToLiveSeconds, TimeUnit.SECONDS)
                 .maximumSize(maxSize)
-                .build<K, V>()
+                .buildNamed<K, V>("RPCSecurityManagerShiroCache_$name")
                 .toShiroCache()
     }


@@ -35,6 +35,7 @@ class PersistentIdentityService : SingletonSerializeAsToken(), IdentityServiceIn
         fun createPKMap(): AppendOnlyPersistentMap<SecureHash, PartyAndCertificate, PersistentIdentity, String> {
             return AppendOnlyPersistentMap(
+                    "PersistentIdentityService_partyByKey",
                     toPersistentEntityKey = { it.toString() },
                     fromPersistentEntity = {
                         Pair(
@@ -51,6 +52,7 @@ class PersistentIdentityService : SingletonSerializeAsToken(), IdentityServiceIn
         fun createX500Map(): AppendOnlyPersistentMap<CordaX500Name, SecureHash, PersistentIdentityNames, String> {
             return AppendOnlyPersistentMap(
+                    "PersistentIdentityService_partyByName",
                     toPersistentEntityKey = { it.toString() },
                     fromPersistentEntity = { Pair(CordaX500Name.parse(it.name), SecureHash.parse(it.publicKeyHash)) },
                     toPersistentEntity = { key: CordaX500Name, value: SecureHash ->


@@ -49,6 +49,7 @@ class PersistentKeyManagementService(val identityService: PersistentIdentityServ
     private companion object {
         fun createKeyMap(): AppendOnlyPersistentMap<PublicKey, PrivateKey, PersistentKey, String> {
             return AppendOnlyPersistentMap(
+                    "PersistentKeyManagementService_keys",
                     toPersistentEntityKey = { it.toStringShort() },
                     fromPersistentEntity = { Pair(Crypto.decodePublicKey(it.publicKey), Crypto.decodePrivateKey(
                             it.privateKey)) },


@@ -25,6 +25,7 @@ class P2PMessageDeduplicator(private val database: CordaPersistence) {
     private fun createProcessedMessages(): AppendOnlyPersistentMap<DeduplicationId, MessageMeta, ProcessedMessage, String> {
         return AppendOnlyPersistentMap(
+                "P2PMessageDeduplicator_processedMessages",
                 toPersistentEntityKey = { it.toString },
                 fromPersistentEntity = { Pair(DeduplicationId(it.id), MessageMeta(it.insertionTime, it.hash, it.seqNo)) },
                 toPersistentEntity = { key: DeduplicationId, value: MessageMeta ->


@@ -13,6 +13,7 @@ import net.corda.core.context.Trace
 import net.corda.core.context.Trace.InvocationId
 import net.corda.core.identity.CordaX500Name
 import net.corda.core.internal.LifeCycle
+import net.corda.core.internal.buildNamed
 import net.corda.core.messaging.RPCOps
 import net.corda.core.serialization.SerializationContext
 import net.corda.core.serialization.SerializationDefaults
@@ -153,7 +154,7 @@ class RPCServer(
             log.debug { "Unsubscribing from Observable with id $key because of $cause" }
             value!!.subscription.unsubscribe()
         }
-        return Caffeine.newBuilder().removalListener(onObservableRemove).executor(SameThreadExecutor.getExecutor()).build()
+        return Caffeine.newBuilder().removalListener(onObservableRemove).executor(SameThreadExecutor.getExecutor()).buildNamed("RPCServer_observableSubscription")
     }

     fun start(activeMqServerControl: ActiveMQServerControl) {


@@ -122,7 +122,9 @@ open class PersistentNetworkMapCache(private val database: CordaPersistence,
     override fun getNodesByLegalIdentityKey(identityKey: PublicKey): List<NodeInfo> = nodesByKeyCache[identityKey]!!

-    private val nodesByKeyCache = NonInvalidatingCache<PublicKey, List<NodeInfo>>(1024) { key ->
+    private val nodesByKeyCache = NonInvalidatingCache<PublicKey, List<NodeInfo>>(
+            "PersistentNetworkMap_nodesByKey",
+            1024) { key ->
         database.transaction { queryByIdentityKey(session, key) }
     }
@@ -140,7 +142,9 @@
         return identityByLegalNameCache.get(name)!!.orElse(null)
     }

-    private val identityByLegalNameCache = NonInvalidatingCache<CordaX500Name, Optional<PartyAndCertificate>>(1024) { name ->
+    private val identityByLegalNameCache = NonInvalidatingCache<CordaX500Name, Optional<PartyAndCertificate>>(
+            "PersistentNetworkMap_idByLegalName",
+            1024) { name ->
         Optional.ofNullable(database.transaction { queryIdentityByLegalName(session, name) })
     }


@@ -52,6 +52,7 @@ class DBTransactionStorage(cacheSizeBytes: Long, private val database: CordaPers
         fun createTransactionsMap(maxSizeInBytes: Long)
                 : AppendOnlyPersistentMapBase<SecureHash, TxCacheValue, DBTransaction, String> {
             return WeightBasedAppendOnlyPersistentMap<SecureHash, TxCacheValue, DBTransaction, String>(
+                    name = "DBTransactionStorage_transactions",
                     toPersistentEntityKey = { it.toString() },
                     fromPersistentEntity = {
                         Pair(SecureHash.parse(it.txId),


@@ -206,6 +206,7 @@ class NodeAttachmentService(
     // a problem somewhere else or this needs to be revisited.
     private val attachmentContentCache = NonInvalidatingWeightBasedCache(
+            name = "NodeAttachmentService_attachmentContent",
             maxWeight = attachmentContentCacheSize,
             weigher = Weigher<SecureHash, Optional<Pair<Attachment, ByteArray>>> { key, value -> key.size + if (value.isPresent) value.get().second.size else 0 },
             loadFunction = { Optional.ofNullable(loadAttachmentContent(it)) }
@@ -226,7 +227,9 @@
         }
     }

-    private val attachmentCache = NonInvalidatingCache<SecureHash, Optional<Attachment>>(attachmentCacheBound) { key ->
+    private val attachmentCache = NonInvalidatingCache<SecureHash, Optional<Attachment>>(
+            "NodeAttachmentService_attachemnt",
+            attachmentCacheBound) { key ->
         Optional.ofNullable(createAttachment(key))
     }


@@ -48,6 +48,7 @@ class FlowsDrainingModeOperationsImpl(readPhysicalNodeId: () -> String, private
     }

     internal val map = PersistentMap(
+            "FlowDrainingMode_nodeProperties",
             { key -> key },
             { entity -> entity.key to entity.value!! },
             NodePropertiesPersistentStore::DBNodeProperty,


@@ -102,6 +102,7 @@ class BFTNonValidatingNotaryService(
         private fun createMap(): AppendOnlyPersistentMap<StateRef, SecureHash, CommittedState, PersistentStateRef> {
             return AppendOnlyPersistentMap(
+                    "BFTNonValidatingNotaryService_transactions",
                     toPersistentEntityKey = { PersistentStateRef(it.txhash.toString(), it.index) },
                     fromPersistentEntity = {
                         //TODO null check will become obsolete after making DB/JPA columns not nullable


@@ -77,6 +77,7 @@ class PersistentUniquenessProvider(val clock: Clock) : UniquenessProvider, Singl
         private val log = contextLogger()
         fun createMap(): AppendOnlyPersistentMap<StateRef, SecureHash, CommittedState, PersistentStateRef> =
                 AppendOnlyPersistentMap(
+                        "PersistentUniquenessProvider_transactions",
                        toPersistentEntityKey = { PersistentStateRef(it.txhash.toString(), it.index) },
                        fromPersistentEntity = {
                            //TODO null check will become obsolete after making DB/JPA columns not nullable


@@ -61,6 +61,7 @@ class RaftUniquenessProvider(
         private val log = contextLogger()
         fun createMap(): AppendOnlyPersistentMap<StateRef, Pair<Long, SecureHash>, CommittedState, PersistentStateRef> =
                 AppendOnlyPersistentMap(
+                        "RaftUniquenessProvider_transactions",
                        toPersistentEntityKey = { PersistentStateRef(it) },
                        fromPersistentEntity = {
                            val txId = it.id.txId


@@ -28,6 +28,7 @@ class ContractUpgradeServiceImpl : ContractUpgradeService, SingletonSerializeAsT
     private companion object {
         fun createContractUpgradesMap(): PersistentMap<String, String, DBContractUpgrade, String> {
             return PersistentMap(
+                    "ContractUpgradeService_upgrades",
                     toPersistentEntityKey = { it },
                     fromPersistentEntity = { Pair(it.stateRef, it.upgradedContractClassName ?: "") },
                     toPersistentEntity = { key: String, value: String ->


@@ -309,6 +309,7 @@ abstract class AppendOnlyPersistentMapBase<K, V, E, out EK>(
 // Open for tests to override
 open class AppendOnlyPersistentMap<K, V, E, out EK>(
+        name: String,
         toPersistentEntityKey: (K) -> EK,
         fromPersistentEntity: (E) -> Pair<K, V>,
         toPersistentEntity: (key: K, value: V) -> E,
@@ -321,6 +322,7 @@ open class AppendOnlyPersistentMap<K, V, E, out EK>(
         persistentEntityClass) {
     //TODO determine cacheBound based on entity class later or with node config allowing tuning, or using some heuristic based on heap size
     override val cache = NonInvalidatingCache(
+            name = name,
             bound = cacheBound,
             loadFunction = { key: K ->
                 // This gets called if a value is read and the cache has no Transactional for this key yet.
@@ -353,6 +355,7 @@ open class AppendOnlyPersistentMap<K, V, E, out EK>(
 // Same as above, but with weighted values (e.g. memory footprint sensitive).
 class WeightBasedAppendOnlyPersistentMap<K, V, E, out EK>(
+        name: String,
         toPersistentEntityKey: (K) -> EK,
         fromPersistentEntity: (E) -> Pair<K, V>,
         toPersistentEntity: (key: K, value: V) -> E,
@@ -365,6 +368,7 @@ class WeightBasedAppendOnlyPersistentMap<K, V, E, out EK>(
         toPersistentEntity,
         persistentEntityClass) {
     override val cache = NonInvalidatingWeightBasedCache(
+            name,
             maxWeight = maxWeight,
             weigher = Weigher { key, value -> weighingFunc(key, value) },
             loadFunction = { key: K ->


@@ -4,18 +4,19 @@ import com.github.benmanes.caffeine.cache.CacheLoader
 import com.github.benmanes.caffeine.cache.Caffeine
 import com.github.benmanes.caffeine.cache.LoadingCache
 import com.github.benmanes.caffeine.cache.Weigher
+import net.corda.core.internal.buildNamed

 class NonInvalidatingCache<K, V> private constructor(
         val cache: LoadingCache<K, V>
 ) : LoadingCache<K, V> by cache {

-    constructor(bound: Long, loadFunction: (K) -> V) :
-            this(buildCache(bound, loadFunction))
+    constructor(name: String, bound: Long, loadFunction: (K) -> V) :
+            this(buildCache(name, bound, loadFunction))

     private companion object {
-        private fun <K, V> buildCache(bound: Long, loadFunction: (K) -> V): LoadingCache<K, V> {
+        private fun <K, V> buildCache(name: String, bound: Long, loadFunction: (K) -> V): LoadingCache<K, V> {
             val builder = Caffeine.newBuilder().maximumSize(bound)
-            return builder.build(NonInvalidatingCacheLoader(loadFunction))
+            return builder.buildNamed(name, NonInvalidatingCacheLoader(loadFunction))
         }
     }
@@ -32,13 +33,13 @@ class NonInvalidatingCache<K, V> private constructor(
 class NonInvalidatingWeightBasedCache<K, V> private constructor(
         val cache: LoadingCache<K, V>
 ) : LoadingCache<K, V> by cache {
-    constructor (maxWeight: Long, weigher: Weigher<K, V>, loadFunction: (K) -> V) :
-            this(buildCache(maxWeight, weigher, loadFunction))
+    constructor (name: String, maxWeight: Long, weigher: Weigher<K, V>, loadFunction: (K) -> V) :
+            this(buildCache(name, maxWeight, weigher, loadFunction))

     private companion object {
-        private fun <K, V> buildCache(maxWeight: Long, weigher: Weigher<K, V>, loadFunction: (K) -> V): LoadingCache<K, V> {
+        private fun <K, V> buildCache(name: String, maxWeight: Long, weigher: Weigher<K, V>, loadFunction: (K) -> V): LoadingCache<K, V> {
             val builder = Caffeine.newBuilder().maximumWeight(maxWeight).weigher(weigher)
-            return builder.build(NonInvalidatingCache.NonInvalidatingCacheLoader(loadFunction))
+            return builder.buildNamed(name, NonInvalidatingCache.NonInvalidatingCacheLoader(loadFunction))
         }
     }
 }
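With the name now the first constructor parameter, a call site looks roughly like this (a hypothetical cache, not taken from this commit; the name and types are made up):

    // A bounded, named, non-invalidating cache; the name is validated through buildNamed.
    val exampleCache = NonInvalidatingCache<String, Int>(
            "ExampleService_lookups",
            1024) { key ->
        key.length   // load function run on a cache miss
    }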


@@ -5,20 +5,21 @@ import com.github.benmanes.caffeine.cache.CacheLoader
 import com.github.benmanes.caffeine.cache.Caffeine
 import com.github.benmanes.caffeine.cache.LoadingCache
 import com.github.benmanes.caffeine.cache.RemovalListener
+import net.corda.core.internal.buildNamed

 class NonInvalidatingUnboundCache<K, V> private constructor(
         val cache: LoadingCache<K, V>
 ) : LoadingCache<K, V> by cache {

-    constructor(loadFunction: (K) -> V, removalListener: RemovalListener<K, V> = RemovalListener { _, _, _ -> },
+    constructor(name: String, loadFunction: (K) -> V, removalListener: RemovalListener<K, V> = RemovalListener { _, _, _ -> },
                 keysToPreload: () -> Iterable<K> = { emptyList() }) :
-            this(buildCache(loadFunction, removalListener, keysToPreload))
+            this(buildCache(name, loadFunction, removalListener, keysToPreload))

     private companion object {
-        private fun <K, V> buildCache(loadFunction: (K) -> V, removalListener: RemovalListener<K, V>,
+        private fun <K, V> buildCache(name: String, loadFunction: (K) -> V, removalListener: RemovalListener<K, V>,
                                       keysToPreload: () -> Iterable<K>): LoadingCache<K, V> {
             val builder = Caffeine.newBuilder().removalListener(removalListener).executor(SameThreadExecutor.getExecutor())
-            return builder.build(NonInvalidatingCacheLoader(loadFunction)).apply {
+            return builder.buildNamed(name, NonInvalidatingCacheLoader(loadFunction)).apply {
                 getAll(keysToPreload())
             }
         }


@@ -10,6 +10,7 @@ import java.util.*
  * Implements an unbound caching layer on top of a table accessed via Hibernate mapping.
  */
 class PersistentMap<K : Any, V, E, out EK>(
+        name: String,
         val toPersistentEntityKey: (K) -> EK,
         val fromPersistentEntity: (E) -> Pair<K, V>,
         val toPersistentEntity: (key: K, value: V) -> E,
@@ -21,6 +22,7 @@ class PersistentMap<K : Any, V, E, out EK>(
     }

     private val cache = NonInvalidatingUnboundCache(
+            name,
             loadFunction = { key -> Optional.ofNullable(loadValue(key)) },
             removalListener = ExplicitRemoval(toPersistentEntityKey, persistentEntityClass)
     )


@@ -271,6 +271,7 @@ class AppendOnlyPersistentMapTest(var scenario: Scenario) {
     )

     class TestMap : AppendOnlyPersistentMap<Long, String, PersistentMapEntry, Long>(
+            "ApoendOnlyPersistentMap_test",
             toPersistentEntityKey = { it },
             fromPersistentEntity = { Pair(it.key, it.value) },
             toPersistentEntity = { key: Long, value: String ->


@@ -16,6 +16,7 @@ class PersistentMapTests {
     //create a test map using an existing db table
     private fun createTestMap(): PersistentMap<String, String, ContractUpgradeServiceImpl.DBContractUpgrade, String> {
         return PersistentMap(
+                "Test_test",
                 toPersistentEntityKey = { it },
                 fromPersistentEntity = { Pair(it.stateRef, it.upgradedContractClassName ?: "") },
                 toPersistentEntity = { key: String, value: String ->


@@ -6,6 +6,7 @@ import net.corda.core.DeleteForDJVM
 import net.corda.core.KeepForDJVM
 import net.corda.core.contracts.Attachment
 import net.corda.core.crypto.SecureHash
+import net.corda.core.internal.buildNamed
 import net.corda.core.internal.copyBytes
 import net.corda.core.serialization.*
 import net.corda.core.utilities.ByteSequence
@@ -76,7 +77,7 @@ data class SerializationContextImpl @JvmOverloads constructor(override val prefe
  */
 @DeleteForDJVM
 internal class AttachmentsClassLoaderBuilder(private val properties: Map<Any, Any>, private val deserializationClassLoader: ClassLoader) {
-    private val cache: Cache<List<SecureHash>, AttachmentsClassLoader> = Caffeine.newBuilder().weakValues().maximumSize(1024).build()
+    private val cache: Cache<List<SecureHash>, AttachmentsClassLoader> = Caffeine.newBuilder().weakValues().maximumSize(1024).buildNamed("SerializationScheme_attachmentClassloader")

     fun build(attachmentHashes: List<SecureHash>): AttachmentsClassLoader? {
         val serializationContext = properties[serializationContextKey] as? SerializeAsTokenContext ?: return null // Some tests don't set one.


@@ -14,6 +14,7 @@ import javafx.scene.image.WritableImage
 import javafx.scene.paint.Color
 import javafx.scene.text.TextAlignment
 import net.corda.core.crypto.SecureHash
+import net.corda.core.internal.buildNamed

 /**
  * (The MIT License)
@@ -75,7 +76,7 @@ object IdenticonRenderer {
     private const val renderingSize = 30.0

-    private val cache = Caffeine.newBuilder().build(CacheLoader<SecureHash, Image> { key ->
+    private val cache = Caffeine.newBuilder().buildNamed("IdentIconRenderer_image", CacheLoader<SecureHash, Image> { key ->
         key.let { render(key.hashCode(), renderingSize) }
     })