CORDA-1217 Replace Guava caches with Caffeine (#2818)

Rick Parker
2018-03-14 16:07:31 +00:00
committed by GitHub
parent 8f750c0629
commit a24a2105b1
21 changed files with 112 additions and 117 deletions
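The change is mechanical in most files: Guava's CacheBuilder entry point becomes Caffeine's builder, which keeps the same fluent style. A minimal sketch of the mapping, with invented names and not taken from the diff:

import com.github.benmanes.caffeine.cache.Cache
import com.github.benmanes.caffeine.cache.Caffeine
import java.util.concurrent.TimeUnit

// Hypothetical helper, not part of the commit: a bounded cache with write expiry.
fun buildExpiringCache(maxEntries: Long, ttlSeconds: Long): Cache<String, String> {
    // Guava equivalent: CacheBuilder.newBuilder().maximumSize(maxEntries)
    //                       .expireAfterWrite(ttlSeconds, TimeUnit.SECONDS).build<String, String>()
    return Caffeine.newBuilder()
            .maximumSize(maxEntries)
            .expireAfterWrite(ttlSeconds, TimeUnit.SECONDS)
            .build<String, String>()
}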

View File

@ -1,7 +1,8 @@
package net.corda.node.internal.security
-import com.google.common.cache.CacheBuilder
-import com.google.common.cache.Cache
+import com.github.benmanes.caffeine.cache.Cache
+import com.github.benmanes.caffeine.cache.Caffeine
import com.google.common.primitives.Ints
import net.corda.core.context.AuthServiceId
import net.corda.core.utilities.loggerFor
@ -94,7 +95,7 @@ class RPCSecurityManagerImpl(config: AuthServiceConfig) : RPCSecurityManager {
return DefaultSecurityManager(realm).also {
// Setup optional cache layer if configured
it.cacheManager = config.options?.cache?.let {
-GuavaCacheManager(
+CaffeineCacheManager(
timeToLiveSeconds = it.expireAfterSecs,
maxSize = it.maxEntries)
}
@ -257,9 +258,9 @@ private class NodeJdbcRealm(config: SecurityConfiguration.AuthService.DataSource
private typealias ShiroCache<K, V> = org.apache.shiro.cache.Cache<K, V>
/*
-* Adapts a [com.google.common.cache.Cache] to a [org.apache.shiro.cache.Cache] implementation.
+* Adapts a [com.github.benmanes.caffeine.cache.Cache] to a [org.apache.shiro.cache.Cache] implementation.
*/
-private fun <K, V> Cache<K, V>.toShiroCache(name: String) = object : ShiroCache<K, V> {
+private fun <K : Any, V> Cache<K, V>.toShiroCache(name: String) = object : ShiroCache<K, V> {
val name = name
private val impl = this@toShiroCache
@ -282,7 +283,7 @@ private fun <K, V> Cache<K, V>.toShiroCache(name: String) = object : ShiroCache<
impl.invalidateAll()
}
-override fun size() = Ints.checkedCast(impl.size())
+override fun size() = Ints.checkedCast(impl.estimatedSize())
override fun keys() = impl.asMap().keys
override fun values() = impl.asMap().values
override fun toString() = "Guava cache adapter [$impl]"
@ -290,22 +291,22 @@ private fun <K, V> Cache<K, V>.toShiroCache(name: String) = object : ShiroCache<
/*
* Implementation of [org.apache.shiro.cache.CacheManager] based on
-* cache implementation in [com.google.common.cache]
+* cache implementation in [com.github.benmanes.caffeine.cache.Cache]
*/
-private class GuavaCacheManager(val maxSize: Long,
-val timeToLiveSeconds: Long) : CacheManager {
+private class CaffeineCacheManager(val maxSize: Long,
+val timeToLiveSeconds: Long) : CacheManager {
private val instances = ConcurrentHashMap<String, ShiroCache<*, *>>()
-override fun <K, V> getCache(name: String): ShiroCache<K, V> {
+override fun <K : Any, V> getCache(name: String): ShiroCache<K, V> {
val result = instances[name] ?: buildCache<K, V>(name)
instances.putIfAbsent(name, result)
return result as ShiroCache<K, V>
}
-private fun <K, V> buildCache(name: String) : ShiroCache<K, V> {
+private fun <K : Any, V> buildCache(name: String): ShiroCache<K, V> {
logger.info("Constructing cache '$name' with maximumSize=$maxSize, TTL=${timeToLiveSeconds}s")
-return CacheBuilder.newBuilder()
+return Caffeine.newBuilder()
.expireAfterWrite(timeToLiveSeconds, TimeUnit.SECONDS)
.maximumSize(maxSize)
.build<K, V>()
@ -313,6 +314,6 @@ private class GuavaCacheManager(val maxSize: Long,
}
companion object {
-private val logger = loggerFor<GuavaCacheManager>()
+private val logger = loggerFor<CaffeineCacheManager>()
}
}
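Caffeine has no size() method on Cache; estimatedSize() returns an approximate Long, which is why the Shiro adapter above keeps Ints.checkedCast. A small illustrative sketch (names are made up):

import com.github.benmanes.caffeine.cache.Caffeine
import com.google.common.primitives.Ints

// Illustrative only: shows the size() -> estimatedSize() rename the adapter relies on.
fun approximateEntryCount(): Int {
    val cache = Caffeine.newBuilder().maximumSize(100).build<String, Int>()
    cache.put("admin", 1)
    // estimatedSize() returns a Long and may lag behind pending maintenance work.
    return Ints.checkedCast(cache.estimatedSize())
}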

View File

@ -1,12 +1,13 @@
package net.corda.node.services.messaging
+import co.paralleluniverse.common.util.SameThreadExecutor
import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.Serializer
import com.esotericsoftware.kryo.io.Input
import com.esotericsoftware.kryo.io.Output
-import com.google.common.cache.Cache
-import com.google.common.cache.CacheBuilder
-import com.google.common.cache.RemovalListener
+import com.github.benmanes.caffeine.cache.Cache
+import com.github.benmanes.caffeine.cache.Caffeine
+import com.github.benmanes.caffeine.cache.RemovalListener
import com.google.common.collect.HashMultimap
import com.google.common.collect.Multimaps
import com.google.common.collect.SetMultimap
@ -145,11 +146,11 @@ class RPCServer(
}
private fun createObservableSubscriptionMap(): ObservableSubscriptionMap {
-val onObservableRemove = RemovalListener<InvocationId, ObservableSubscription> {
-log.debug { "Unsubscribing from Observable with id ${it.key} because of ${it.cause}" }
-it.value.subscription.unsubscribe()
+val onObservableRemove = RemovalListener<InvocationId, ObservableSubscription> { key, value, cause ->
+log.debug { "Unsubscribing from Observable with id ${key} because of ${cause}" }
+value!!.subscription.unsubscribe()
}
-return CacheBuilder.newBuilder().removalListener(onObservableRemove).build()
+return Caffeine.newBuilder().removalListener(onObservableRemove).executor(SameThreadExecutor.getExecutor()).build()
}
fun start(activeMqServerControl: ActiveMQServerControl) {
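Two Caffeine differences drive the RPCServer change: the RemovalListener receives (key, value, cause) instead of a single RemovalNotification, and listeners run on the cache's executor, so a same-thread executor is supplied to preserve Guava's synchronous unsubscribe behaviour. A hedged sketch with stand-in key and value types (Long keys and AutoCloseable values are invented for illustration):

import co.paralleluniverse.common.util.SameThreadExecutor
import com.github.benmanes.caffeine.cache.Cache
import com.github.benmanes.caffeine.cache.Caffeine
import com.github.benmanes.caffeine.cache.RemovalListener

// Illustrative only: stand-in for the ObservableSubscription map in RPCServer.
fun buildSubscriptionCache(): Cache<Long, AutoCloseable> {
    val onRemove = RemovalListener<Long, AutoCloseable> { key, value, cause ->
        // Caffeine hands the listener the key, value and cause directly.
        println("Releasing subscription $key because of $cause")
        value?.close()
    }
    return Caffeine.newBuilder()
            .removalListener(onRemove)
            // Without this, Caffeine runs listeners asynchronously on ForkJoinPool.commonPool().
            .executor(SameThreadExecutor.getExecutor())
            .build<Long, AutoCloseable>()
}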

View File

@ -164,9 +164,9 @@ open class PersistentNetworkMapCache(
override fun getNodesByLegalName(name: CordaX500Name): List<NodeInfo> = database.transaction { queryByLegalName(session, name) }
-override fun getNodesByLegalIdentityKey(identityKey: PublicKey): List<NodeInfo> = nodesByKeyCache[identityKey]
+override fun getNodesByLegalIdentityKey(identityKey: PublicKey): List<NodeInfo> = nodesByKeyCache[identityKey]!!
-private val nodesByKeyCache = NonInvalidatingCache<PublicKey, List<NodeInfo>>(1024, 8, { key -> database.transaction { queryByIdentityKey(session, key) } })
+private val nodesByKeyCache = NonInvalidatingCache<PublicKey, List<NodeInfo>>(1024, { key -> database.transaction { queryByIdentityKey(session, key) } })
override fun getNodesByOwningKeyIndex(identityKeyIndex: String): List<NodeInfo> {
return database.transaction {
@ -176,9 +176,9 @@ open class PersistentNetworkMapCache(
override fun getNodeByAddress(address: NetworkHostAndPort): NodeInfo? = database.transaction { queryByAddress(session, address) }
-override fun getPeerCertificateByLegalName(name: CordaX500Name): PartyAndCertificate? = identityByLegalNameCache.get(name).orElse(null)
+override fun getPeerCertificateByLegalName(name: CordaX500Name): PartyAndCertificate? = identityByLegalNameCache.get(name)!!.orElse(null)
-private val identityByLegalNameCache = NonInvalidatingCache<CordaX500Name, Optional<PartyAndCertificate>>(1024, 8, { name -> Optional.ofNullable(database.transaction { queryIdentityByLegalName(session, name) }) })
+private val identityByLegalNameCache = NonInvalidatingCache<CordaX500Name, Optional<PartyAndCertificate>>(1024, { name -> Optional.ofNullable(database.transaction { queryIdentityByLegalName(session, name) }) })
override fun track(): DataFeed<List<NodeInfo>, MapChange> {
synchronized(_changed) {

View File

@ -1,7 +1,7 @@
package net.corda.node.services.persistence
import com.codahale.metrics.MetricRegistry
-import com.google.common.cache.Weigher
+import com.github.benmanes.caffeine.cache.Weigher
import com.google.common.hash.HashCode
import com.google.common.hash.Hashing
import com.google.common.hash.HashingInputStream
@ -24,7 +24,6 @@ import net.corda.node.services.config.NodeConfiguration
import net.corda.node.services.vault.HibernateAttachmentQueryCriteriaParser
import net.corda.node.utilities.NonInvalidatingCache
import net.corda.node.utilities.NonInvalidatingWeightBasedCache
-import net.corda.node.utilities.defaultCordaCacheConcurrencyLevel
import net.corda.nodeapi.internal.persistence.NODE_DATABASE_PREFIX
import net.corda.nodeapi.internal.persistence.currentDBSession
import net.corda.nodeapi.internal.withContractsInJar
@ -209,7 +208,6 @@ class NodeAttachmentService(
private val attachmentContentCache = NonInvalidatingWeightBasedCache<SecureHash, Optional<Pair<Attachment, ByteArray>>>(
maxWeight = attachmentContentCacheSize,
-concurrencyLevel = defaultCordaCacheConcurrencyLevel,
weigher = object : Weigher<SecureHash, Optional<Pair<Attachment, ByteArray>>> {
override fun weigh(key: SecureHash, value: Optional<Pair<Attachment, ByteArray>>): Int {
return key.size + if (value.isPresent) value.get().second.size else 0
@ -234,12 +232,11 @@ class NodeAttachmentService(
private val attachmentCache = NonInvalidatingCache<SecureHash, Optional<Attachment>>(
attachmentCacheBound,
-defaultCordaCacheConcurrencyLevel,
{ key -> Optional.ofNullable(createAttachment(key)) }
)
private fun createAttachment(key: SecureHash): Attachment? {
-val content = attachmentContentCache.get(key)
+val content = attachmentContentCache.get(key)!!
if (content.isPresent) {
return content.get().first
}
@ -249,7 +246,7 @@ class NodeAttachmentService(
}
override fun openAttachment(id: SecureHash): Attachment? {
-val attachment = attachmentCache.get(id)
+val attachment = attachmentCache.get(id)!!
if (attachment.isPresent) {
return attachment.get()
}
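The dropped concurrencyLevel arguments reflect that Caffeine's builder has no such setting; the weight-based cache keeps the same Weigher shape, now imported from the Caffeine package. An illustrative sketch with invented names:

import com.github.benmanes.caffeine.cache.Cache
import com.github.benmanes.caffeine.cache.Caffeine
import com.github.benmanes.caffeine.cache.Weigher

// Illustrative only: bound the cache by total bytes rather than entry count.
fun buildWeightedCache(maxWeightBytes: Long): Cache<String, ByteArray> =
        Caffeine.newBuilder()
                .maximumWeight(maxWeightBytes)
                .weigher(Weigher<String, ByteArray> { key, value -> key.length + value.size })
                .build<String, ByteArray>()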

View File

@ -1,7 +1,7 @@
package net.corda.node.utilities
-import com.google.common.cache.LoadingCache
-import com.google.common.cache.Weigher
+import com.github.benmanes.caffeine.cache.LoadingCache
+import com.github.benmanes.caffeine.cache.Weigher
import net.corda.core.utilities.contextLogger
import net.corda.nodeapi.internal.persistence.currentDBSession
import java.util.*
@ -29,7 +29,7 @@ abstract class AppendOnlyPersistentMapBase<K, V, E, out EK>(
* Returns the value associated with the key, first loading that value from the storage if necessary.
*/
operator fun get(key: K): V? {
-return cache.get(key).orElse(null)
+return cache.get(key)!!.orElse(null)
}
val size get() = allPersisted().toList().size
@ -62,7 +62,7 @@ abstract class AppendOnlyPersistentMapBase<K, V, E, out EK>(
} else {
Optional.of(value)
}
-}
+}!!
if (!insertionAttempt) {
if (existingInCache.isPresent) {
// Key already exists in cache, do nothing.
@ -71,7 +71,7 @@ abstract class AppendOnlyPersistentMapBase<K, V, E, out EK>(
// This happens when the key was queried before with no value associated. We invalidate the cached null
// value and recursively call set again. This is to avoid race conditions where another thread queries after
// the invalidate but before the set.
-cache.invalidate(key)
+cache.invalidate(key!!)
return set(key, value, logWarning, store)
}
}
@ -148,7 +148,6 @@ class AppendOnlyPersistentMap<K, V, E, out EK>(
//TODO determine cacheBound based on entity class later or with node config allowing tuning, or using some heuristic based on heap size
override val cache = NonInvalidatingCache<K, Optional<V>>(
bound = cacheBound,
-concurrencyLevel = 8,
loadFunction = { key -> Optional.ofNullable(loadValue(key)) })
}
@ -166,7 +165,6 @@ class WeightBasedAppendOnlyPersistentMap<K, V, E, out EK>(
persistentEntityClass) {
override val cache = NonInvalidatingWeightBasedCache<K, Optional<V>>(
maxWeight = maxWeight,
-concurrencyLevel = 8,
weigher = object : Weigher<K, Optional<V>> {
override fun weigh(key: K, value: Optional<V>): Int {
return weighingFunc(key, value)

View File

@ -1,29 +1,28 @@
package net.corda.node.utilities
-import com.google.common.cache.CacheBuilder
-import com.google.common.cache.CacheLoader
-import com.google.common.cache.LoadingCache
-import com.google.common.cache.Weigher
-import com.google.common.util.concurrent.ListenableFuture
+import com.github.benmanes.caffeine.cache.CacheLoader
+import com.github.benmanes.caffeine.cache.Caffeine
+import com.github.benmanes.caffeine.cache.LoadingCache
+import com.github.benmanes.caffeine.cache.Weigher
class NonInvalidatingCache<K, V> private constructor(
val cache: LoadingCache<K, V>
) : LoadingCache<K, V> by cache {
-constructor(bound: Long, concurrencyLevel: Int, loadFunction: (K) -> V) :
-this(buildCache(bound, concurrencyLevel, loadFunction))
+constructor(bound: Long, loadFunction: (K) -> V) :
+this(buildCache(bound, loadFunction))
private companion object {
-private fun <K, V> buildCache(bound: Long, concurrencyLevel: Int, loadFunction: (K) -> V): LoadingCache<K, V> {
-val builder = CacheBuilder.newBuilder().maximumSize(bound).concurrencyLevel(concurrencyLevel)
+private fun <K, V> buildCache(bound: Long, loadFunction: (K) -> V): LoadingCache<K, V> {
+val builder = Caffeine.newBuilder().maximumSize(bound)
return builder.build(NonInvalidatingCacheLoader(loadFunction))
}
}
// TODO look into overriding loadAll() if we ever use it
-class NonInvalidatingCacheLoader<K, V>(val loadFunction: (K) -> V) : CacheLoader<K, V>() {
-override fun reload(key: K, oldValue: V): ListenableFuture<V> {
+class NonInvalidatingCacheLoader<K, V>(val loadFunction: (K) -> V) : CacheLoader<K, V> {
+override fun reload(key: K, oldValue: V): V {
throw IllegalStateException("Non invalidating cache refreshed")
}
@ -34,16 +33,14 @@ class NonInvalidatingCache<K, V> private constructor(
class NonInvalidatingWeightBasedCache<K, V> private constructor(
val cache: LoadingCache<K, V>
) : LoadingCache<K, V> by cache {
-constructor (maxWeight: Long, concurrencyLevel: Int, weigher: Weigher<K, V>, loadFunction: (K) -> V) :
-this(buildCache(maxWeight, concurrencyLevel, weigher, loadFunction))
+constructor (maxWeight: Long, weigher: Weigher<K, V>, loadFunction: (K) -> V) :
+this(buildCache(maxWeight, weigher, loadFunction))
private companion object {
-private fun <K, V> buildCache(maxWeight: Long, concurrencyLevel: Int, weigher: Weigher<K, V>, loadFunction: (K) -> V): LoadingCache<K, V> {
-val builder = CacheBuilder.newBuilder().maximumWeight(maxWeight).weigher(weigher).concurrencyLevel(concurrencyLevel)
+private fun <K, V> buildCache(maxWeight: Long, weigher: Weigher<K, V>, loadFunction: (K) -> V): LoadingCache<K, V> {
+val builder = Caffeine.newBuilder().maximumWeight(maxWeight).weigher(weigher)
return builder.build(NonInvalidatingCache.NonInvalidatingCacheLoader(loadFunction))
}
}
-}
-val defaultCordaCacheConcurrencyLevel: Int = 8
+}
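Caffeine's CacheLoader is an interface rather than an abstract class, and reload() returns the value directly instead of a ListenableFuture, which is all the NonInvalidatingCache loaders above need. A minimal self-contained loader in the same shape (names invented):

import com.github.benmanes.caffeine.cache.CacheLoader
import com.github.benmanes.caffeine.cache.Caffeine
import com.github.benmanes.caffeine.cache.LoadingCache

// Illustrative only: a loader that derives the value from the key, like the node's loadFunction.
class SquaringLoader : CacheLoader<Int, Int> {
    override fun load(key: Int): Int = key * key
    // No ListenableFuture here: Caffeine's reload() returns the new value directly.
    override fun reload(key: Int, oldValue: Int): Int = load(key)
}

fun squaringCache(): LoadingCache<Int, Int> =
        Caffeine.newBuilder().maximumSize(64).build<Int, Int>(SquaringLoader())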

View File

@ -1,21 +1,23 @@
package net.corda.node.utilities
-import com.google.common.cache.*
-import com.google.common.util.concurrent.ListenableFuture
+import co.paralleluniverse.common.util.SameThreadExecutor
+import com.github.benmanes.caffeine.cache.CacheLoader
+import com.github.benmanes.caffeine.cache.Caffeine
+import com.github.benmanes.caffeine.cache.LoadingCache
+import com.github.benmanes.caffeine.cache.RemovalListener
class NonInvalidatingUnboundCache<K, V> private constructor(
val cache: LoadingCache<K, V>
) : LoadingCache<K, V> by cache {
-constructor(concurrencyLevel: Int, loadFunction: (K) -> V, removalListener: RemovalListener<K, V> = RemovalListener {},
+constructor(loadFunction: (K) -> V, removalListener: RemovalListener<K, V> = RemovalListener { key, value, cause -> },
keysToPreload: () -> Iterable<K> = { emptyList() }) :
-this(buildCache(concurrencyLevel, loadFunction, removalListener, keysToPreload))
+this(buildCache(loadFunction, removalListener, keysToPreload))
private companion object {
-private fun <K, V> buildCache(concurrencyLevel: Int, loadFunction: (K) -> V, removalListener: RemovalListener<K, V>,
+private fun <K, V> buildCache(loadFunction: (K) -> V, removalListener: RemovalListener<K, V>,
keysToPreload: () -> Iterable<K>): LoadingCache<K, V> {
-val builder = CacheBuilder.newBuilder().concurrencyLevel(concurrencyLevel).removalListener(removalListener)
+val builder = Caffeine.newBuilder().removalListener(removalListener).executor(SameThreadExecutor.getExecutor())
return builder.build(NonInvalidatingCacheLoader(loadFunction)).apply {
getAll(keysToPreload())
}
@ -23,8 +25,8 @@ class NonInvalidatingUnboundCache<K, V> private constructor(
}
// TODO look into overriding loadAll() if we ever use it
-private class NonInvalidatingCacheLoader<K, V>(val loadFunction: (K) -> V) : CacheLoader<K, V>() {
-override fun reload(key: K, oldValue: V): ListenableFuture<V> {
+private class NonInvalidatingCacheLoader<K, V>(val loadFunction: (K) -> V) : CacheLoader<K, V> {
+override fun reload(key: K, oldValue: V): V {
throw IllegalStateException("Non invalidating cache refreshed")
}

View File

@ -1,8 +1,7 @@
package net.corda.node.utilities
-import com.google.common.cache.RemovalCause
-import com.google.common.cache.RemovalListener
-import com.google.common.cache.RemovalNotification
+import com.github.benmanes.caffeine.cache.RemovalCause
+import com.github.benmanes.caffeine.cache.RemovalListener
import net.corda.core.utilities.contextLogger
import net.corda.nodeapi.internal.persistence.currentDBSession
import java.util.*
@ -10,7 +9,7 @@ import java.util.*
/**
* Implements an unbound caching layer on top of a table accessed via Hibernate mapping.
*/
-class PersistentMap<K, V, E, out EK>(
+class PersistentMap<K : Any, V, E, out EK>(
val toPersistentEntityKey: (K) -> EK,
val fromPersistentEntity: (E) -> Pair<K, V>,
val toPersistentEntity: (key: K, value: V) -> E,
@ -22,7 +21,6 @@ class PersistentMap<K, V, E, out EK>(
}
private val cache = NonInvalidatingUnboundCache(
-concurrencyLevel = 8,
loadFunction = { key -> Optional.ofNullable(loadValue(key)) },
removalListener = ExplicitRemoval(toPersistentEntityKey, persistentEntityClass)
).apply {
@ -34,11 +32,11 @@ class PersistentMap<K, V, E, out EK>(
}
class ExplicitRemoval<K, V, E, EK>(private val toPersistentEntityKey: (K) -> EK, private val persistentEntityClass: Class<E>) : RemovalListener<K, V> {
-override fun onRemoval(notification: RemovalNotification<K, V>?) {
-when (notification?.cause) {
+override fun onRemoval(key: K?, value: V?, cause: RemovalCause) {
+when (cause) {
RemovalCause.EXPLICIT -> {
val session = currentDBSession()
-val elem = session.find(persistentEntityClass, toPersistentEntityKey(notification.key))
+val elem = session.find(persistentEntityClass, toPersistentEntityKey(key!!))
if (elem != null) {
session.remove(elem)
}
@ -53,14 +51,14 @@ class PersistentMap<K, V, E, out EK>(
}
override operator fun get(key: K): V? {
-return cache.get(key).orElse(null)
+return cache.get(key)!!.orElse(null)
}
fun all(): Sequence<Pair<K, V>> {
return cache.asMap().asSequence().filter { it.value.isPresent }.map { Pair(it.key, it.value.get()) }
}
-override val size get() = cache.size().toInt()
+override val size get() = cache.estimatedSize().toInt()
private tailrec fun set(key: K, value: V, logWarning: Boolean = true, store: (K, V) -> V?, replace: (K, V) -> Unit): Boolean {
var insertionAttempt = false
@ -72,7 +70,7 @@ class PersistentMap<K, V, E, out EK>(
// Store the value, depending on store implementation this may replace existing entry in DB.
store(key, value)
Optional.of(value)
-}
+}!!
if (!insertionAttempt) {
if (existingInCache.isPresent) {
// Key already exists in cache, store the new value in the DB (depends on store implementation) and refresh cache.
@ -165,7 +163,7 @@ class PersistentMap<K, V, E, out EK>(
* Removes the mapping for the specified key from this map and underlying storage if present.
*/
override fun remove(key: K): V? {
-val result = cache.get(key).orElse(null)
+val result = cache.get(key)!!.orElse(null)
cache.invalidate(key)
return result
}
@ -253,7 +251,7 @@ class PersistentMap<K, V, E, out EK>(
override fun put(key: K, value: V): V? {
val old = cache.get(key)
addWithDuplicatesReplaced(key, value)
-return old.orElse(null)
+return old!!.orElse(null)
}
fun load() {
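A note on the recurring !! additions: Caffeine permits a loader to return null, so Kotlin sees cache.get(key) as nullable, whereas Guava's LoadingCache guaranteed a non-null result. Because these caches load values wrapped in Optional, the non-null assertion is safe. A sketch of the pattern with a hypothetical loadFromDb helper:

import com.github.benmanes.caffeine.cache.Caffeine
import java.util.Optional

// Hypothetical stand-in for a database-backed lookup that may find nothing.
private fun loadFromDb(key: String): String? = if (key.isBlank()) null else "value-for-$key"

private val lookupCache = Caffeine.newBuilder()
        .maximumSize(128)
        .build<String, Optional<String>> { k -> Optional.ofNullable(loadFromDb(k)) }

// The loader always yields an Optional, so the non-null assertion mirrors the pattern in the diff.
fun lookup(key: String): String? = lookupCache.get(key)!!.orElse(null)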