Skip to content

Commit

Permalink
Move serialization outside of cache lock
Browse files Browse the repository at this point in the history
  • Loading branch information
ashare80 committed Jul 18, 2023
1 parent 547ec6c commit 1ba7b25
Show file tree
Hide file tree
Showing 4 changed files with 95 additions and 71 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ import com.apollographql.apollo3.api.json.MapJsonWriter
import com.apollographql.apollo3.api.toJson
import com.apollographql.apollo3.api.variables
import com.apollographql.apollo3.cache.normalized.api.internal.CacheBatchReader
import com.apollographql.apollo3.cache.normalized.api.internal.CacheDataTransformer
import com.apollographql.apollo3.cache.normalized.api.internal.Normalizer

fun <D : Operation.Data> Operation<D>.normalize(
Expand Down Expand Up @@ -57,7 +58,6 @@ fun <D : Executable.Data> Executable<D>.normalize(
.normalize(writer.root() as Map<String, Any?>, rootField().selections, rootField().type.rawType())
}


fun <D : Executable.Data> Executable<D>.readDataFromCache(
customScalarAdapters: CustomScalarAdapters,
cache: ReadOnlyNormalizedCache,
Expand Down Expand Up @@ -106,21 +106,16 @@ private fun <D : Executable.Data> Executable<D>.readInternal(
cache: ReadOnlyNormalizedCache,
cacheResolver: Any,
cacheHeaders: CacheHeaders,
): D {
val map = CacheBatchReader(
): CacheDataTransformer<D> {
return CacheBatchReader(
cache = cache,
cacheHeaders = cacheHeaders,
cacheResolver = cacheResolver,
variables = variables(customScalarAdapters, true),
rootKey = cacheKey.key,
rootSelections = rootField().selections,
rootTypename = rootField().type.rawType().name
).toMap()

val reader = MapJsonReader(
root = map,
)
return adapter().fromJson(reader, customScalarAdapters)
).collectData(adapter())
}

fun Collection<Record>?.dependentKeys(): Set<String> {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
package com.apollographql.apollo3.cache.normalized.api.internal

import com.apollographql.apollo3.annotations.ApolloInternal
import com.apollographql.apollo3.api.CompiledField
import com.apollographql.apollo3.api.CompiledFragment
import com.apollographql.apollo3.api.CompiledSelection
import com.apollographql.apollo3.api.CompositeAdapter
import com.apollographql.apollo3.api.Executable
import com.apollographql.apollo3.cache.normalized.api.ApolloResolver
import com.apollographql.apollo3.cache.normalized.api.CacheHeaders
Expand Down Expand Up @@ -83,7 +83,7 @@ internal class CacheBatchReader(
}
}

fun toMap(): Map<String, Any?> {
fun <D: Executable.Data> collectData(adapter: CompositeAdapter<D>): CacheDataTransformer<D> {
pendingReferences.add(
PendingReference(
key = rootKey,
Expand Down Expand Up @@ -132,8 +132,7 @@ internal class CacheBatchReader(
}
}

@Suppress("UNCHECKED_CAST")
return data[emptyList()].replaceCacheKeys(emptyList()) as Map<String, Any?>
return CacheDataTransformer(adapter, data)
}

/**
Expand Down Expand Up @@ -179,28 +178,4 @@ internal class CacheBatchReader(
}
}
}

private fun Any?.replaceCacheKeys(path: List<Any>): Any? {
return when (this) {
is CacheKey -> {
data[path].replaceCacheKeys(path)
}
is List<*> -> {
mapIndexed { index, src ->
src.replaceCacheKeys(path + index)
}
}
is Map<*, *> -> {
// This will traverse Map custom scalars but this is ok as it shouldn't contain any CacheKey
mapValues {
it.value.replaceCacheKeys(path + (it.key as String))
}
}
else -> {
// Scalar value
this
}
}
}
}

Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
package com.apollographql.apollo3.cache.normalized.api.internal

import com.apollographql.apollo3.api.CompositeAdapter
import com.apollographql.apollo3.api.CustomScalarAdapters
import com.apollographql.apollo3.api.Executable
import com.apollographql.apollo3.api.fromJson
import com.apollographql.apollo3.api.json.MapJsonReader
import com.apollographql.apollo3.cache.normalized.api.CacheKey

/**
 * Holds the raw records read from the normalized cache together with the
 * adapter required to turn them back into a typed [Executable.Data] model.
 *
 * Deserialization is deferred until [toData] is invoked, which lets the cache
 * read (and whatever lock guards it) finish before any JSON decoding happens.
 *
 * @param adapter decodes the flattened record map into [D].
 * @param data records keyed by the path at which they were read; the root
 *   record lives under the empty path.
 */
data class CacheDataTransformer<D: Executable.Data>(
    private val adapter: CompositeAdapter<D>,
    private val data: MutableMap<List<Any>, Map<String, Any?>>,
) {
  /**
   * Materializes the cached records into a typed data model.
   *
   * @param customScalarAdapters adapters used while decoding custom scalars.
   */
  fun toData(
      customScalarAdapters: CustomScalarAdapters,
  ): D {
    @Suppress("UNCHECKED_CAST")
    val rootMap = data[emptyList()].inlineReferences(emptyList()) as Map<String, Any?>
    return adapter.fromJson(MapJsonReader(root = rootMap), customScalarAdapters)
  }

  /**
   * Recursively substitutes every [CacheKey] reference with the record it
   * points to, carrying [path] along so nested records can be looked up in
   * [data].
   */
  private fun Any?.inlineReferences(path: List<Any>): Any? = when (this) {
    is CacheKey -> data[path].inlineReferences(path)

    is List<*> -> mapIndexed { index, element ->
      element.inlineReferences(path + index)
    }

    // Traversing Map custom scalars here is fine: they never contain a CacheKey.
    is Map<*, *> -> mapValues { (key, value) ->
      value.inlineReferences(path + (key as String))
    }

    // Plain scalar value: returned untouched.
    else -> this
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ import com.apollographql.apollo3.cache.normalized.api.NormalizedCacheFactory
import com.apollographql.apollo3.cache.normalized.api.ReadOnlyNormalizedCache
import com.apollographql.apollo3.cache.normalized.api.Record
import com.apollographql.apollo3.cache.normalized.api.RecordMerger
import com.apollographql.apollo3.cache.normalized.api.internal.CacheDataTransformer
import com.apollographql.apollo3.cache.normalized.api.internal.OptimisticCache
import com.apollographql.apollo3.cache.normalized.api.normalize
import com.apollographql.apollo3.cache.normalized.api.readDataFromCache
Expand Down Expand Up @@ -115,7 +116,7 @@ internal class DefaultApolloStore(
cacheHeaders = cacheHeaders,
cacheKey = CacheKey.rootKey()
)
}
}.toData(customScalarAdapters)
}

override suspend fun <D : Fragment.Data> readFragment(
Expand All @@ -132,7 +133,7 @@ internal class DefaultApolloStore(
cacheHeaders = cacheHeaders,
cacheKey = cacheKey
)
}
}.toData(customScalarAdapters)
}


Expand Down Expand Up @@ -167,15 +168,15 @@ internal class DefaultApolloStore(
cacheHeaders: CacheHeaders,
publish: Boolean,
): Set<String> {
val changedKeys = lock.write {
val records = fragment.normalize(
data = fragmentData,
customScalarAdapters = customScalarAdapters,
cacheKeyGenerator = cacheKeyGenerator,
metadataGenerator = metadataGenerator,
rootKey = cacheKey.key
).values
val records = fragment.normalize(
data = fragmentData,
customScalarAdapters = customScalarAdapters,
cacheKeyGenerator = cacheKeyGenerator,
metadataGenerator = metadataGenerator,
rootKey = cacheKey.key
).values

val changedKeys = lock.write {
cache.merge(records, cacheHeaders, recordMerger)
}

Expand All @@ -193,21 +194,22 @@ internal class DefaultApolloStore(
publish: Boolean,
customScalarAdapters: CustomScalarAdapters,
): Pair<Set<Record>, Set<String>> {
val (records, changedKeys) = lock.write {
val records = operation.normalize(
data = operationData,
customScalarAdapters = customScalarAdapters,
cacheKeyGenerator = cacheKeyGenerator,
metadataGenerator = metadataGenerator,
)
val records = operation.normalize(
data = operationData,
customScalarAdapters = customScalarAdapters,
cacheKeyGenerator = cacheKeyGenerator,
metadataGenerator = metadataGenerator,
).values.toSet()

records to cache.merge(records.values.toList(), cacheHeaders, recordMerger)
val changedKeys = lock.write {
cache.merge(records, cacheHeaders, recordMerger)
}

if (publish) {
publish(changedKeys)
}

return records.values.toSet() to changedKeys
return records to changedKeys
}


Expand All @@ -218,20 +220,20 @@ internal class DefaultApolloStore(
customScalarAdapters: CustomScalarAdapters,
publish: Boolean,
): Set<String> {
val changedKeys = lock.write {
val records = operation.normalize(
data = operationData,
customScalarAdapters = customScalarAdapters,
cacheKeyGenerator = cacheKeyGenerator,
metadataGenerator = metadataGenerator,
).values.map { record ->
Record(
key = record.key,
fields = record.fields,
mutationId = mutationId
)
}
val records = operation.normalize(
data = operationData,
customScalarAdapters = customScalarAdapters,
cacheKeyGenerator = cacheKeyGenerator,
metadataGenerator = metadataGenerator,
).values.map { record ->
Record(
key = record.key,
fields = record.fields,
mutationId = mutationId
)
}

val changedKeys = lock.write {
/**
* TODO: should we forward the cache headers to the optimistic store?
*/
Expand Down Expand Up @@ -281,7 +283,7 @@ internal class DefaultApolloStore(
cache: ReadOnlyNormalizedCache,
cacheResolver: Any,
cacheHeaders: CacheHeaders,
): D {
): CacheDataTransformer<D> {
return when (cacheResolver) {
is CacheResolver -> readDataFromCache(
cacheKey,
Expand All @@ -304,4 +306,3 @@ internal class DefaultApolloStore(
}
}
}

0 comments on commit 1ba7b25

Please sign in to comment.