Skip to content

Commit

Permalink
Move serialization outside of cache lock (#5101)
Browse files Browse the repository at this point in the history
Co-authored-by: Martin Bonnin <[email protected]>
  • Loading branch information
ashare80 and martinbonnin authored Jul 21, 2023
1 parent 88b5739 commit 6a06344
Show file tree
Hide file tree
Showing 8 changed files with 262 additions and 144 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
package com.apollographql.apollo3.cache.normalized.api

import com.apollographql.apollo3.annotations.ApolloInternal

/**
 * Data read from the cache that can be represented as a JSON map.
 *
 * Implementations hold the records already resolved from the normalized cache, so the
 * (potentially expensive) materialization into a JSON map — and the adapter decoding done
 * by [toData] — can happen after the cache read, outside of any cache lock.
 *
 * @see [toData]
 */
@ApolloInternal
interface CacheData {
// Materializes the resolved records as a JSON-shaped map suitable for adapter decoding.
fun toMap(): Map<String, Any?>
}
Original file line number Diff line number Diff line change
@@ -1,6 +1,9 @@
package com.apollographql.apollo3.cache.normalized.api

import com.apollographql.apollo3.annotations.ApolloDeprecatedSince
import com.apollographql.apollo3.annotations.ApolloExperimental
import com.apollographql.apollo3.annotations.ApolloInternal
import com.apollographql.apollo3.api.CompositeAdapter
import com.apollographql.apollo3.api.CustomScalarAdapters
import com.apollographql.apollo3.api.Executable
import com.apollographql.apollo3.api.Operation
Expand Down Expand Up @@ -57,13 +60,12 @@ fun <D : Executable.Data> Executable<D>.normalize(
.normalize(writer.root() as Map<String, Any?>, rootField().selections, rootField().type.rawType())
}


fun <D : Executable.Data> Executable<D>.readDataFromCache(
customScalarAdapters: CustomScalarAdapters,
cache: ReadOnlyNormalizedCache,
cacheResolver: CacheResolver,
cacheHeaders: CacheHeaders,
) = readInternal(
): D = readDataFromCache(
cacheKey = CacheKey.rootKey(),
customScalarAdapters = customScalarAdapters,
cache = cache,
Expand All @@ -77,6 +79,35 @@ fun <D : Executable.Data> Executable<D>.readDataFromCache(
cache: ReadOnlyNormalizedCache,
cacheResolver: CacheResolver,
cacheHeaders: CacheHeaders,
): D = readInternal(
cacheKey = cacheKey,
customScalarAdapters = customScalarAdapters,
cache = cache,
cacheResolver = cacheResolver,
cacheHeaders = cacheHeaders,
).toData(adapter(), customScalarAdapters)

/**
 * Reads this executable's data from [cache], starting at [cacheKey], using [cacheResolver]
 * to resolve individual fields.
 *
 * The records are first collected from the cache, then decoded into a typed [D] model with
 * this executable's adapter.
 */
fun <D : Executable.Data> Executable<D>.readDataFromCache(
    cacheKey: CacheKey,
    customScalarAdapters: CustomScalarAdapters,
    cache: ReadOnlyNormalizedCache,
    cacheResolver: ApolloResolver,
    cacheHeaders: CacheHeaders,
): D {
  // Step 1: resolve the raw records from the normalized cache.
  val cacheData = readInternal(
      cacheKey = cacheKey,
      customScalarAdapters = customScalarAdapters,
      cache = cache,
      cacheResolver = cacheResolver,
      cacheHeaders = cacheHeaders,
  )
  // Step 2: decode them into the typed data model.
  return cacheData.toData(adapter(), customScalarAdapters)
}

@ApolloInternal
fun <D : Executable.Data> Executable<D>.readDataFromCacheInternal(
cacheKey: CacheKey,
customScalarAdapters: CustomScalarAdapters,
cache: ReadOnlyNormalizedCache,
cacheResolver: CacheResolver,
cacheHeaders: CacheHeaders,
) = readInternal(
cacheKey = cacheKey,
customScalarAdapters = customScalarAdapters,
Expand All @@ -85,7 +116,8 @@ fun <D : Executable.Data> Executable<D>.readDataFromCache(
cacheHeaders = cacheHeaders,
)

fun <D : Executable.Data> Executable<D>.readDataFromCache(
@ApolloInternal
fun <D : Executable.Data> Executable<D>.readDataFromCacheInternal(
cacheKey: CacheKey,
customScalarAdapters: CustomScalarAdapters,
cache: ReadOnlyNormalizedCache,
Expand All @@ -106,25 +138,31 @@ private fun <D : Executable.Data> Executable<D>.readInternal(
cache: ReadOnlyNormalizedCache,
cacheResolver: Any,
cacheHeaders: CacheHeaders,
): D {
val map = CacheBatchReader(
): CacheData {
return CacheBatchReader(
cache = cache,
cacheHeaders = cacheHeaders,
cacheResolver = cacheResolver,
variables = variables(customScalarAdapters, true),
rootKey = cacheKey.key,
rootSelections = rootField().selections,
rootTypename = rootField().type.rawType().name
).toMap()

val reader = MapJsonReader(
root = map,
)
return adapter().fromJson(reader, customScalarAdapters)
).collectData()
}

/**
 * Returns every field key contained in these records.
 *
 * A `null` (or empty) receiver yields an empty set.
 */
fun Collection<Record>?.dependentKeys(): Set<String> {
  if (this == null) return emptySet()
  // Accumulate directly into a set to deduplicate keys shared across records.
  return flatMapTo(mutableSetOf()) { record -> record.fieldKeys() }
}

/**
 * Decodes this [CacheData] into a typed [D] model.
 *
 * @param adapter the adapter used to parse the JSON map produced by [CacheData.toMap].
 * @param customScalarAdapters adapters for any custom scalars encountered while parsing.
 */
@ApolloInternal
fun <D : Executable.Data> CacheData.toData(
    adapter: CompositeAdapter<D>,
    customScalarAdapters: CustomScalarAdapters,
): D {
  // Materialize the JSON map once and stream it through the adapter.
  return adapter.fromJson(MapJsonReader(root = toMap()), customScalarAdapters)
}
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
package com.apollographql.apollo3.cache.normalized.api.internal

import com.apollographql.apollo3.annotations.ApolloInternal
import com.apollographql.apollo3.api.CompiledField
import com.apollographql.apollo3.api.CompiledFragment
import com.apollographql.apollo3.api.CompiledSelection
import com.apollographql.apollo3.api.Executable
import com.apollographql.apollo3.cache.normalized.api.ApolloResolver
import com.apollographql.apollo3.cache.normalized.api.CacheData
import com.apollographql.apollo3.cache.normalized.api.CacheHeaders
import com.apollographql.apollo3.cache.normalized.api.CacheKey
import com.apollographql.apollo3.cache.normalized.api.CacheResolver
Expand Down Expand Up @@ -83,7 +83,7 @@ internal class CacheBatchReader(
}
}

fun toMap(): Map<String, Any?> {
fun collectData(): CacheData {
pendingReferences.add(
PendingReference(
key = rootKey,
Expand Down Expand Up @@ -132,8 +132,7 @@ internal class CacheBatchReader(
}
}

@Suppress("UNCHECKED_CAST")
return data[emptyList()].replaceCacheKeys(emptyList()) as Map<String, Any?>
return CacheBatchReaderData(data)
}

/**
Expand Down Expand Up @@ -180,27 +179,35 @@ internal class CacheBatchReader(
}
}

private fun Any?.replaceCacheKeys(path: List<Any>): Any? {
return when (this) {
is CacheKey -> {
data[path].replaceCacheKeys(path)
}
is List<*> -> {
mapIndexed { index, src ->
src.replaceCacheKeys(path + index)
private data class CacheBatchReaderData(
private val data: Map<List<Any>, Map<String, Any?>>,
): CacheData {
@Suppress("UNCHECKED_CAST")
override fun toMap(): Map<String, Any?> {
return data[emptyList()].replaceCacheKeys(emptyList()) as Map<String, Any?>
}

private fun Any?.replaceCacheKeys(path: List<Any>): Any? {
return when (this) {
is CacheKey -> {
data[path].replaceCacheKeys(path)
}
}
is Map<*, *> -> {
// This will traverse Map custom scalars but this is ok as it shouldn't contain any CacheKey
mapValues {
it.value.replaceCacheKeys(path + (it.key as String))
is List<*> -> {
mapIndexed { index, src ->
src.replaceCacheKeys(path + index)
}
}
is Map<*, *> -> {
// This will traverse Map custom scalars but this is ok as it shouldn't contain any CacheKey
mapValues {
it.value.replaceCacheKeys(path + (it.key as String))
}
}
else -> {
// Scalar value
this
}
}
else -> {
// Scalar value
this
}
}
}
}

Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
package com.apollographql.apollo3.cache.normalized.api

import com.apollographql.apollo3.annotations.ApolloInternal

/**
 * Data read from the cache that can be represented as a JSON map.
 *
 * Holding the already-resolved records behind this interface lets callers defer the
 * JSON-map materialization and adapter decoding (see [toData]) until after the cache
 * read has completed, outside of any cache lock.
 *
 * @see [toData]
 */
@ApolloInternal
interface CacheData {
// Materializes the resolved records as a JSON-shaped map suitable for adapter decoding.
fun toMap(): Map<String, Any?>
}
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
package com.apollographql.apollo3.cache.normalized.api

import com.apollographql.apollo3.annotations.ApolloInternal
import com.apollographql.apollo3.api.CompositeAdapter
import com.apollographql.apollo3.api.CustomScalarAdapters
import com.apollographql.apollo3.api.Executable
import com.apollographql.apollo3.api.Fragment
Expand All @@ -12,7 +14,6 @@ import com.apollographql.apollo3.api.variables
import com.apollographql.apollo3.cache.normalized.api.internal.CacheBatchReader
import com.apollographql.apollo3.cache.normalized.api.internal.Normalizer


fun <D : Operation.Data> Operation<D>.normalize(
data: D,
customScalarAdapters: CustomScalarAdapters,
Expand All @@ -38,6 +39,34 @@ fun <D : Executable.Data> Executable<D>.readDataFromCache(
cache: ReadOnlyNormalizedCache,
cacheResolver: CacheResolver,
cacheHeaders: CacheHeaders,
):D = readInternal(
cacheKey = CacheKey.rootKey(),
customScalarAdapters = customScalarAdapters,
cache = cache,
cacheResolver = cacheResolver,
cacheHeaders = cacheHeaders,
).toData(adapter(), customScalarAdapters)

/**
 * Reads this fragment's data from [cache], rooted at [cacheKey], resolving fields
 * through [cacheResolver].
 *
 * Collects the matching records from the cache and decodes them into a typed [D] model
 * with this fragment's adapter.
 */
fun <D : Fragment.Data> Fragment<D>.readDataFromCache(
    cacheKey: CacheKey,
    customScalarAdapters: CustomScalarAdapters,
    cache: ReadOnlyNormalizedCache,
    cacheResolver: CacheResolver,
    cacheHeaders: CacheHeaders,
): D {
  // Resolve records first, then decode — keeping the two phases separate.
  return readInternal(
      cacheKey = cacheKey,
      customScalarAdapters = customScalarAdapters,
      cache = cache,
      cacheResolver = cacheResolver,
      cacheHeaders = cacheHeaders,
  ).toData(adapter(), customScalarAdapters)
}

@ApolloInternal
fun <D : Executable.Data> Executable<D>.readDataFromCacheInternal(
customScalarAdapters: CustomScalarAdapters,
cache: ReadOnlyNormalizedCache,
cacheResolver: CacheResolver,
cacheHeaders: CacheHeaders,
) = readInternal(
cacheKey = CacheKey.rootKey(),
customScalarAdapters = customScalarAdapters,
Expand All @@ -46,7 +75,8 @@ fun <D : Executable.Data> Executable<D>.readDataFromCache(
cacheHeaders = cacheHeaders,
)

fun <D : Fragment.Data> Fragment<D>.readDataFromCache(
@ApolloInternal
fun <D : Fragment.Data> Fragment<D>.readDataFromCacheInternal(
cacheKey: CacheKey,
customScalarAdapters: CustomScalarAdapters,
cache: ReadOnlyNormalizedCache,
Expand All @@ -66,25 +96,31 @@ private fun <D : Executable.Data> Executable<D>.readInternal(
cache: ReadOnlyNormalizedCache,
cacheResolver: CacheResolver,
cacheHeaders: CacheHeaders,
): D {
val map = CacheBatchReader(
): CacheData {
return CacheBatchReader(
cache = cache,
cacheHeaders = cacheHeaders,
cacheResolver = cacheResolver,
variables = variables(customScalarAdapters, true),
rootKey = cacheKey.key,
rootSelections = rootField().selections,
rootTypename = rootField().type.rawType().name
).toMap()

val reader = MapJsonReader(
root = map,
)
return adapter().fromJson(reader, customScalarAdapters)
).collectData()
}

/**
 * Collects the field keys of all records in this collection.
 *
 * Returns an empty set when the receiver is `null` or contains no records.
 */
fun Collection<Record>?.dependentKeys(): Set<String> {
  if (this == null) return emptySet()
  // flatMapTo a set deduplicates keys that appear in several records.
  return flatMapTo(mutableSetOf()) { record -> record.fieldKeys() }
}

/**
 * Decodes this [CacheData] into a typed [D] model.
 *
 * @param adapter the adapter that parses the JSON map produced by [CacheData.toMap].
 * @param customScalarAdapters adapters for custom scalars encountered during parsing.
 */
@ApolloInternal
fun <D : Executable.Data> CacheData.toData(
    adapter: CompositeAdapter<D>,
    customScalarAdapters: CustomScalarAdapters,
): D {
  // Build the map-backed JSON reader and hand it straight to the adapter.
  return adapter.fromJson(MapJsonReader(root = toMap()), customScalarAdapters)
}
Loading

0 comments on commit 6a06344

Please sign in to comment.