Skip to content

Commit

Permalink
Cache data as interface
Browse files Browse the repository at this point in the history
  • Loading branch information
ashare80 committed Jul 19, 2023
1 parent 1a3b7f8 commit 946c7d2
Show file tree
Hide file tree
Showing 5 changed files with 63 additions and 65 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
package com.apollographql.apollo3.cache.normalized.api

/**
 * Data read from the cache that can be represented as a JSON map.
 *
 * Implementations hold cache-internal structures (e.g. records keyed by path) and
 * materialize them lazily via [toMap].
 *
 * @see [toData]
 */
interface CacheData {
// Returns the data as a JSON-compatible map: values are scalars, Lists and nested
// Maps only, with any cache-internal references already resolved.
fun toMap(): Map<String, Any?>
}
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package com.apollographql.apollo3.cache.normalized.api

import com.apollographql.apollo3.annotations.ApolloExperimental
import com.apollographql.apollo3.api.CompositeAdapter
import com.apollographql.apollo3.api.CustomScalarAdapters
import com.apollographql.apollo3.api.Executable
import com.apollographql.apollo3.api.Operation
Expand All @@ -10,7 +11,6 @@ import com.apollographql.apollo3.api.json.MapJsonWriter
import com.apollographql.apollo3.api.toJson
import com.apollographql.apollo3.api.variables
import com.apollographql.apollo3.cache.normalized.api.internal.CacheBatchReader
import com.apollographql.apollo3.cache.normalized.api.internal.CacheDataTransformer
import com.apollographql.apollo3.cache.normalized.api.internal.Normalizer

fun <D : Operation.Data> Operation<D>.normalize(
Expand Down Expand Up @@ -106,7 +106,7 @@ private fun <D : Executable.Data> Executable<D>.readInternal(
cache: ReadOnlyNormalizedCache,
cacheResolver: Any,
cacheHeaders: CacheHeaders,
): CacheDataTransformer<D> {
): CacheData {
return CacheBatchReader(
cache = cache,
cacheHeaders = cacheHeaders,
Expand All @@ -115,11 +115,21 @@ private fun <D : Executable.Data> Executable<D>.readInternal(
rootKey = cacheKey.key,
rootSelections = rootField().selections,
rootTypename = rootField().type.rawType().name
).collectData(adapter())
).collectData()
}

/**
 * Collects the field keys of every record in this collection into a single set.
 *
 * A `null` receiver is treated as an empty collection and yields an empty set.
 */
fun Collection<Record>?.dependentKeys(): Set<String> {
  if (this == null) return emptySet()
  return flatMapTo(mutableSetOf()) { record -> record.fieldKeys() }
}

/**
 * Deserializes this [CacheData] into a typed model [D].
 *
 * The data is first materialized with [CacheData.toMap], then read back through
 * [adapter] as if it were a JSON document.
 *
 * @param adapter the composite adapter matching [D]
 * @param customScalarAdapters adapters used for custom scalar values
 */
fun <D : Executable.Data> CacheData.toData(
    adapter: CompositeAdapter<D>,
    customScalarAdapters: CustomScalarAdapters,
): D = adapter.fromJson(MapJsonReader(root = toMap()), customScalarAdapters)
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,9 @@ package com.apollographql.apollo3.cache.normalized.api.internal
import com.apollographql.apollo3.api.CompiledField
import com.apollographql.apollo3.api.CompiledFragment
import com.apollographql.apollo3.api.CompiledSelection
import com.apollographql.apollo3.api.CompositeAdapter
import com.apollographql.apollo3.api.Executable
import com.apollographql.apollo3.cache.normalized.api.ApolloResolver
import com.apollographql.apollo3.cache.normalized.api.CacheData
import com.apollographql.apollo3.cache.normalized.api.CacheHeaders
import com.apollographql.apollo3.cache.normalized.api.CacheKey
import com.apollographql.apollo3.cache.normalized.api.CacheResolver
Expand Down Expand Up @@ -83,7 +83,7 @@ internal class CacheBatchReader(
}
}

fun <D: Executable.Data> collectData(adapter: CompositeAdapter<D>): CacheDataTransformer<D> {
fun collectData(): CacheData {
pendingReferences.add(
PendingReference(
key = rootKey,
Expand Down Expand Up @@ -132,7 +132,7 @@ internal class CacheBatchReader(
}
}

return CacheDataTransformer(adapter, data)
return CacheBatchReaderData(data)
}

/**
Expand Down Expand Up @@ -178,4 +178,36 @@ internal class CacheBatchReader(
}
}
}

/**
 * [CacheData] produced by [CacheBatchReader.collectData].
 *
 * [data] maps a path (the list of field names / list indices from the root) to the
 * flat map of fields read for the record at that path. Record references inside
 * those maps are represented as [CacheKey] values and are only resolved into
 * nested maps when [toMap] is called.
 */
private data class CacheBatchReaderData(
private val data: MutableMap<List<Any>, Map<String, Any?>>,
): CacheData {
@Suppress("UNCHECKED_CAST")
override fun toMap(): Map<String, Any?> {
// The root record is stored under the empty path.
// NOTE(review): if the root entry is absent, data[emptyList()] is null and the
// cast below would throw — presumably the reader always populates the root;
// confirm against CacheBatchReader.collectData.
return data[emptyList()].replaceCacheKeys(emptyList()) as Map<String, Any?>
}

// Recursively rebuilds the nested JSON structure: wherever a CacheKey reference
// appears, the referenced record's fields are looked up in [data] by the current
// path and inlined in its place.
private fun Any?.replaceCacheKeys(path: List<Any>): Any? {
return when (this) {
is CacheKey -> {
// A reference: substitute the record stored for this path, resolving any
// references it contains in turn.
data[path].replaceCacheKeys(path)
}
is List<*> -> {
// List items extend the path with their index.
mapIndexed { index, src ->
src.replaceCacheKeys(path + index)
}
}
is Map<*, *> -> {
// This will traverse Map custom scalars but this is ok as it shouldn't contain any CacheKey
mapValues {
it.value.replaceCacheKeys(path + (it.key as String))
}
}
else -> {
// Scalar value
this
}
}
}
}
}

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,11 @@ import com.apollographql.apollo3.cache.normalized.api.NormalizedCacheFactory
import com.apollographql.apollo3.cache.normalized.api.ReadOnlyNormalizedCache
import com.apollographql.apollo3.cache.normalized.api.Record
import com.apollographql.apollo3.cache.normalized.api.RecordMerger
import com.apollographql.apollo3.cache.normalized.api.internal.CacheDataTransformer
import com.apollographql.apollo3.cache.normalized.api.CacheData
import com.apollographql.apollo3.cache.normalized.api.internal.OptimisticCache
import com.apollographql.apollo3.cache.normalized.api.normalize
import com.apollographql.apollo3.cache.normalized.api.readDataFromCache
import com.apollographql.apollo3.cache.normalized.api.toData
import com.benasher44.uuid.Uuid
import kotlinx.coroutines.channels.BufferOverflow
import kotlinx.coroutines.flow.MutableSharedFlow
Expand Down Expand Up @@ -116,7 +117,7 @@ internal class DefaultApolloStore(
cacheHeaders = cacheHeaders,
cacheKey = CacheKey.rootKey()
)
}.toData(customScalarAdapters)
}.toData(operation.adapter(), customScalarAdapters)
}

override suspend fun <D : Fragment.Data> readFragment(
Expand All @@ -133,7 +134,7 @@ internal class DefaultApolloStore(
cacheHeaders = cacheHeaders,
cacheKey = cacheKey
)
}.toData(customScalarAdapters)
}.toData(fragment.adapter(), customScalarAdapters)
}


Expand Down Expand Up @@ -283,7 +284,7 @@ internal class DefaultApolloStore(
cache: ReadOnlyNormalizedCache,
cacheResolver: Any,
cacheHeaders: CacheHeaders,
): CacheDataTransformer<D> {
): CacheData {
return when (cacheResolver) {
is CacheResolver -> readDataFromCache(
cacheKey,
Expand Down

0 comments on commit 946c7d2

Please sign in to comment.