From 1ba7b2502cc641be022ebf8ac2f68755c9c0f98b Mon Sep 17 00:00:00 2001
From: "adam.share"
Date: Tue, 18 Jul 2023 00:21:34 -0700
Subject: [PATCH] Move serialization outside of cache lock

---
 .../api/OperationCacheExtensions.kt           | 13 ++--
 .../api/internal/CacheBatchReader.kt          | 31 +--------
 .../api/internal/CacheDataTransformer.kt      | 53 ++++++++++++++
 .../normalized/internal/DefaultApolloStore.kt | 69 ++++++++++---------
 4 files changed, 95 insertions(+), 71 deletions(-)
 create mode 100644 libraries/apollo-normalized-cache-api-incubating/src/commonMain/kotlin/com/apollographql/apollo3/cache/normalized/api/internal/CacheDataTransformer.kt

diff --git a/libraries/apollo-normalized-cache-api-incubating/src/commonMain/kotlin/com/apollographql/apollo3/cache/normalized/api/OperationCacheExtensions.kt b/libraries/apollo-normalized-cache-api-incubating/src/commonMain/kotlin/com/apollographql/apollo3/cache/normalized/api/OperationCacheExtensions.kt
index 40b2249980d..128cc677f01 100644
--- a/libraries/apollo-normalized-cache-api-incubating/src/commonMain/kotlin/com/apollographql/apollo3/cache/normalized/api/OperationCacheExtensions.kt
+++ b/libraries/apollo-normalized-cache-api-incubating/src/commonMain/kotlin/com/apollographql/apollo3/cache/normalized/api/OperationCacheExtensions.kt
@@ -10,6 +10,7 @@ import com.apollographql.apollo3.api.json.MapJsonWriter
 import com.apollographql.apollo3.api.toJson
 import com.apollographql.apollo3.api.variables
 import com.apollographql.apollo3.cache.normalized.api.internal.CacheBatchReader
+import com.apollographql.apollo3.cache.normalized.api.internal.CacheDataTransformer
 import com.apollographql.apollo3.cache.normalized.api.internal.Normalizer
 
 fun <D : Operation.Data> Operation<D>.normalize(
@@ -57,7 +58,6 @@ fun <D : Executable.Data> Executable<D>.normalize(
       .normalize(writer.root() as Map<String, Any?>, rootField().selections, rootField().type.rawType())
 }
 
-
 fun <D : Executable.Data> Executable<D>.readDataFromCache(
     customScalarAdapters: CustomScalarAdapters,
     cache: ReadOnlyNormalizedCache,
@@ -106,8 +106,8 @@ private fun <D : Executable.Data> Executable<D>.readInternal(
     cache: ReadOnlyNormalizedCache,
     cacheResolver: Any,
     cacheHeaders: CacheHeaders,
-): D {
-  val map = CacheBatchReader(
+): CacheDataTransformer<D> {
+  return CacheBatchReader(
       cache = cache,
       cacheHeaders = cacheHeaders,
       cacheResolver = cacheResolver,
@@ -115,12 +115,7 @@ private fun <D : Executable.Data> Executable<D>.readInternal(
       rootKey = cacheKey.key,
       rootSelections = rootField().selections,
      rootTypename = rootField().type.rawType().name
-  ).toMap()
-
-  val reader = MapJsonReader(
-      root = map,
-  )
-  return adapter().fromJson(reader, customScalarAdapters)
+  ).collectData(adapter())
 }
 
 fun Collection<Record>?.dependentKeys(): Set<String> {
diff --git a/libraries/apollo-normalized-cache-api-incubating/src/commonMain/kotlin/com/apollographql/apollo3/cache/normalized/api/internal/CacheBatchReader.kt b/libraries/apollo-normalized-cache-api-incubating/src/commonMain/kotlin/com/apollographql/apollo3/cache/normalized/api/internal/CacheBatchReader.kt
index 0908391b163..7144b77b4b5 100644
--- a/libraries/apollo-normalized-cache-api-incubating/src/commonMain/kotlin/com/apollographql/apollo3/cache/normalized/api/internal/CacheBatchReader.kt
+++ b/libraries/apollo-normalized-cache-api-incubating/src/commonMain/kotlin/com/apollographql/apollo3/cache/normalized/api/internal/CacheBatchReader.kt
@@ -1,9 +1,9 @@
 package com.apollographql.apollo3.cache.normalized.api.internal
 
-import com.apollographql.apollo3.annotations.ApolloInternal
 import com.apollographql.apollo3.api.CompiledField
 import com.apollographql.apollo3.api.CompiledFragment
 import com.apollographql.apollo3.api.CompiledSelection
+import com.apollographql.apollo3.api.CompositeAdapter
 import com.apollographql.apollo3.api.Executable
 import com.apollographql.apollo3.cache.normalized.api.ApolloResolver
 import com.apollographql.apollo3.cache.normalized.api.CacheHeaders
@@ -83,7 +83,7 @@
     }
   }
 
-  fun toMap(): Map<String, Any?> {
+  fun <D : Executable.Data> collectData(adapter: CompositeAdapter<D>): CacheDataTransformer<D> {
     pendingReferences.add(
         PendingReference(
             key = rootKey,
@@ -132,8 +132,7 @@
       }
     }
 
-    @Suppress("UNCHECKED_CAST")
-    return data[emptyList()].replaceCacheKeys(emptyList()) as Map<String, Any?>
+    return CacheDataTransformer(adapter, data)
   }
 
   /**
@@ -179,28 +178,4 @@
       }
     }
   }
-
-  private fun Any?.replaceCacheKeys(path: List<Any>): Any? {
-    return when (this) {
-      is CacheKey -> {
-        data[path].replaceCacheKeys(path)
-      }
-      is List<*> -> {
-        mapIndexed { index, src ->
-          src.replaceCacheKeys(path + index)
-        }
-      }
-      is Map<*, *> -> {
-        // This will traverse Map custom scalars but this is ok as it shouldn't contain any CacheKey
-        mapValues {
-          it.value.replaceCacheKeys(path + (it.key as String))
-        }
-      }
-      else -> {
-        // Scalar value
-        this
-      }
-    }
-  }
 }
-
diff --git a/libraries/apollo-normalized-cache-api-incubating/src/commonMain/kotlin/com/apollographql/apollo3/cache/normalized/api/internal/CacheDataTransformer.kt b/libraries/apollo-normalized-cache-api-incubating/src/commonMain/kotlin/com/apollographql/apollo3/cache/normalized/api/internal/CacheDataTransformer.kt
new file mode 100644
index 00000000000..2459d1c2c55
--- /dev/null
+++ b/libraries/apollo-normalized-cache-api-incubating/src/commonMain/kotlin/com/apollographql/apollo3/cache/normalized/api/internal/CacheDataTransformer.kt
@@ -0,0 +1,53 @@
+package com.apollographql.apollo3.cache.normalized.api.internal
+
+import com.apollographql.apollo3.api.CompositeAdapter
+import com.apollographql.apollo3.api.CustomScalarAdapters
+import com.apollographql.apollo3.api.Executable
+import com.apollographql.apollo3.api.fromJson
+import com.apollographql.apollo3.api.json.MapJsonReader
+import com.apollographql.apollo3.cache.normalized.api.CacheKey
+
+data class CacheDataTransformer<D : Executable.Data>(
+    private val adapter: CompositeAdapter<D>,
+    private val data: MutableMap<List<Any>, Map<String, Any?>>,
+) {
+  fun toData(
+      customScalarAdapters: CustomScalarAdapters,
+  ): D {
+    val reader = MapJsonReader(
+        root = toMap(),
+    )
+    return adapter.fromJson(reader, customScalarAdapters)
+  }
+
+  @Suppress("UNCHECKED_CAST")
+  private fun toMap(): Map<String, Any?> {
+    return data[emptyList()].replaceCacheKeys(emptyList()) as Map<String, Any?>
+  }
+
+  private fun Any?.replaceCacheKeys(path: List<Any>): Any? {
+    return when (this) {
+      is CacheKey -> {
+        data[path].replaceCacheKeys(path)
+      }
+
+      is List<*> -> {
+        mapIndexed { index, src ->
+          src.replaceCacheKeys(path + index)
+        }
+      }
+
+      is Map<*, *> -> {
+        // This will traverse Map custom scalars but this is ok as it shouldn't contain any CacheKey
+        mapValues {
+          it.value.replaceCacheKeys(path + (it.key as String))
+        }
+      }
+
+      else -> {
+        // Scalar value
+        this
+      }
+    }
+  }
+}
\ No newline at end of file
diff --git a/libraries/apollo-normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/apollo3/cache/normalized/internal/DefaultApolloStore.kt b/libraries/apollo-normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/apollo3/cache/normalized/internal/DefaultApolloStore.kt
index f18568c3c74..b806a59a2d4 100644
--- a/libraries/apollo-normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/apollo3/cache/normalized/internal/DefaultApolloStore.kt
+++ b/libraries/apollo-normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/apollo3/cache/normalized/internal/DefaultApolloStore.kt
@@ -16,6 +16,7 @@ import com.apollographql.apollo3.cache.normalized.api.NormalizedCacheFactory
 import com.apollographql.apollo3.cache.normalized.api.ReadOnlyNormalizedCache
 import com.apollographql.apollo3.cache.normalized.api.Record
 import com.apollographql.apollo3.cache.normalized.api.RecordMerger
+import com.apollographql.apollo3.cache.normalized.api.internal.CacheDataTransformer
 import com.apollographql.apollo3.cache.normalized.api.internal.OptimisticCache
 import com.apollographql.apollo3.cache.normalized.api.normalize
 import com.apollographql.apollo3.cache.normalized.api.readDataFromCache
@@ -115,7 +116,7 @@ internal class DefaultApolloStore(
           cacheHeaders = cacheHeaders,
           cacheKey = CacheKey.rootKey()
       )
-    }
+    }.toData(customScalarAdapters)
   }
 
   override suspend fun readFragment(
@@ -132,7 +133,7 @@
           cacheHeaders = cacheHeaders,
           cacheKey = cacheKey
       )
-    }
+    }.toData(customScalarAdapters)
   }
 
 
@@ -167,15 +168,15 @@
       cacheHeaders: CacheHeaders,
       publish: Boolean,
   ): Set<String> {
-    val changedKeys = lock.write {
-      val records = fragment.normalize(
-          data = fragmentData,
-          customScalarAdapters = customScalarAdapters,
-          cacheKeyGenerator = cacheKeyGenerator,
-          metadataGenerator = metadataGenerator,
-          rootKey = cacheKey.key
-      ).values
+    val records = fragment.normalize(
+        data = fragmentData,
+        customScalarAdapters = customScalarAdapters,
+        cacheKeyGenerator = cacheKeyGenerator,
+        metadataGenerator = metadataGenerator,
+        rootKey = cacheKey.key
+    ).values
 
+    val changedKeys = lock.write {
       cache.merge(records, cacheHeaders, recordMerger)
     }
 
@@ -193,21 +194,22 @@
       publish: Boolean,
       customScalarAdapters: CustomScalarAdapters,
   ): Pair<Set<Record>, Set<String>> {
-    val (records, changedKeys) = lock.write {
-      val records = operation.normalize(
-          data = operationData,
-          customScalarAdapters = customScalarAdapters,
-          cacheKeyGenerator = cacheKeyGenerator,
-          metadataGenerator = metadataGenerator,
-      )
+    val records = operation.normalize(
+        data = operationData,
+        customScalarAdapters = customScalarAdapters,
+        cacheKeyGenerator = cacheKeyGenerator,
+        metadataGenerator = metadataGenerator,
+    ).values.toSet()
 
-      records to cache.merge(records.values.toList(), cacheHeaders, recordMerger)
+    val changedKeys = lock.write {
+      cache.merge(records, cacheHeaders, recordMerger)
     }
+
     if (publish) {
       publish(changedKeys)
     }
 
-    return records.values.toSet() to changedKeys
+    return records to changedKeys
   }
 
 
@@ -218,20 +220,20 @@
      customScalarAdapters: CustomScalarAdapters,
      publish: Boolean,
  ): Set<String> {
-    val changedKeys = lock.write {
-      val records = operation.normalize(
-          data = operationData,
-          customScalarAdapters = customScalarAdapters,
-          cacheKeyGenerator = cacheKeyGenerator,
-          metadataGenerator = metadataGenerator,
-      ).values.map { record ->
-        Record(
-            key = record.key,
-            fields = record.fields,
-            mutationId = mutationId
-        )
-      }
+    val records = operation.normalize(
+        data = operationData,
+        customScalarAdapters = customScalarAdapters,
+        cacheKeyGenerator = cacheKeyGenerator,
+        metadataGenerator = metadataGenerator,
+    ).values.map { record ->
+      Record(
+          key = record.key,
+          fields = record.fields,
+          mutationId = mutationId
+      )
+    }
 
+    val changedKeys = lock.write {
       /**
        * TODO: should we forward the cache headers to the optimistic store?
       */
@@ -281,7 +283,7 @@
      cache: ReadOnlyNormalizedCache,
      cacheResolver: Any,
      cacheHeaders: CacheHeaders,
-  ): D {
+  ): CacheDataTransformer<D> {
     return when (cacheResolver) {
       is CacheResolver -> readDataFromCache(
          cacheKey,
@@ -304,4 +306,3 @@
     }
   }
 }
-
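
The patch applies a common concurrency pattern: normalize the response into records and decode records back into data models outside the critical section, so the store's lock is held only for the actual cache read or merge. Below is a minimal, self-contained Kotlin sketch of that pattern; RecordStore, RawData and expensiveTransform are illustrative stand-ins, not the Apollo APIs touched above.

// Illustrative sketch of "move expensive work outside the lock".
// RecordStore, RawData and expensiveTransform are stand-ins, not Apollo APIs.
import java.util.concurrent.locks.ReentrantReadWriteLock
import kotlin.concurrent.read
import kotlin.concurrent.write

class RawData<V>(private val compute: () -> V) {
  // Stand-in for normalization/serialization work that does not need the lock.
  fun expensiveTransform(): V = compute()
}

class RecordStore<K, V> {
  private val lock = ReentrantReadWriteLock()
  private val cache = mutableMapOf<K, V>()

  // Before: the transformation runs while the write lock is held,
  // blocking every concurrent reader for its full duration.
  fun putBlocking(key: K, raw: RawData<V>) {
    lock.write {
      cache[key] = raw.expensiveTransform()
    }
  }

  // After (the pattern this patch applies): transform first,
  // then take the lock only for the cheap map update.
  fun put(key: K, raw: RawData<V>) {
    val value = raw.expensiveTransform() // outside the lock
    lock.write { cache[key] = value }    // lock held only for the merge
  }

  fun get(key: K): V? = lock.read { cache[key] }
}

The observable behavior is unchanged; only the time spent holding the lock shrinks, which reduces contention when many readers and writers hit the store concurrently.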