From b3ef6b4d290c07f962a78810c04af49742fcb8c8 Mon Sep 17 00:00:00 2001 From: BoD Date: Tue, 11 Mar 2025 12:22:48 +0100 Subject: [PATCH 01/29] Hashed CacheKeys (WIP) --- .../api/normalized-cache-incubating.api | 4 +- .../api/normalized-cache-incubating.klib.api | 3 +- .../cache/normalized/api/CacheKey.kt | 10 +- .../normalized/internal/CacheBatchReader.kt | 2 +- .../cache/normalized/internal/Normalizer.kt | 9 +- .../sql/internal/BlobRecordSerializer.kt | 2 +- .../normalized/sql/SqlNormalizedCacheTest.kt | 3 + .../kotlin/test/DeferNormalizedCacheTest.kt | 6 +- .../kotlin/DanglingReferencesTest.kt | 19 +-- .../commonTest/kotlin/GarbageCollectTest.kt | 31 ++-- .../kotlin/ReachableCacheKeysTest.kt | 103 +++++++------- .../src/commonTest/kotlin/StaleFieldsTest.kt | 133 +++++++++--------- .../src/commonTest/kotlin/IncludeTest.kt | 5 +- .../src/commonTest/kotlin/test/StoreTest.kt | 13 +- .../src/commonTest/kotlin/test/StoreTest.kt | 13 +- .../src/commonTest/kotlin/test/StoreTest.kt | 13 +- .../src/commonTest/kotlin/NormalizerTest.kt | 57 ++++---- .../commonTest/kotlin/OptimisticCacheTest.kt | 3 +- .../src/commonTest/kotlin/OtherCacheTest.kt | 3 +- .../src/commonTest/kotlin/StoreTest.kt | 11 +- .../FragmentNormalizerTest.kt | 3 +- .../kotlin/CacheMissLoggingInterceptorTest.kt | 3 +- .../kotlin/ConnectionPaginationTest.kt | 6 - .../ConnectionProgrammaticPaginationTest.kt | 6 - .../ConnectionWithNodesPaginationTest.kt | 6 - .../kotlin/CursorBasedPaginationTest.kt | 6 - .../OffsetBasedWithArrayPaginationTest.kt | 6 - ...fsetBasedWithPageAndInputPaginationTest.kt | 6 - .../OffsetBasedWithPagePaginationTest.kt | 25 ---- .../kotlin/test/CachePartialResultTest.kt | 39 ++--- .../commonTest/kotlin/test/StoreErrorsTest.kt | 11 +- 31 files changed, 272 insertions(+), 288 deletions(-) diff --git a/normalized-cache-incubating/api/normalized-cache-incubating.api b/normalized-cache-incubating/api/normalized-cache-incubating.api index 4e6d4260..dff0c266 100644 --- a/normalized-cache-incubating/api/normalized-cache-incubating.api +++ b/normalized-cache-incubating/api/normalized-cache-incubating.api @@ -227,8 +227,9 @@ public final class com/apollographql/cache/normalized/api/CacheHeaders$Companion public final class com/apollographql/cache/normalized/api/CacheKey { public static final field Companion Lcom/apollographql/cache/normalized/api/CacheKey$Companion; - public fun (Ljava/lang/String;)V public fun (Ljava/lang/String;Ljava/util/List;)V + public fun (Ljava/lang/String;Z)V + public synthetic fun (Ljava/lang/String;ZILkotlin/jvm/internal/DefaultConstructorMarker;)V public fun (Ljava/lang/String;[Ljava/lang/String;)V public static final fun canDeserialize (Ljava/lang/String;)Z public static final fun deserialize (Ljava/lang/String;)Lcom/apollographql/cache/normalized/api/CacheKey; @@ -563,6 +564,7 @@ public final class com/apollographql/cache/normalized/api/TypePolicyCacheKeyGene } public final class com/apollographql/cache/normalized/internal/NormalizerKt { + public static final fun hashed (Ljava/lang/String;)Ljava/lang/String; public static final fun normalized (Lcom/apollographql/apollo/api/Executable$Data;Lcom/apollographql/apollo/api/Executable;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheKeyGenerator;Lcom/apollographql/cache/normalized/api/MetadataGenerator;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Lcom/apollographql/cache/normalized/api/EmbeddedFieldsProvider;)Ljava/util/Map; public static final fun normalized 
(Ljava/util/Map;Lcom/apollographql/apollo/api/Executable;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheKeyGenerator;Lcom/apollographql/cache/normalized/api/MetadataGenerator;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Lcom/apollographql/cache/normalized/api/EmbeddedFieldsProvider;)Ljava/util/Map; public static synthetic fun normalized$default (Lcom/apollographql/apollo/api/Executable$Data;Lcom/apollographql/apollo/api/Executable;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheKeyGenerator;Lcom/apollographql/cache/normalized/api/MetadataGenerator;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Lcom/apollographql/cache/normalized/api/EmbeddedFieldsProvider;ILjava/lang/Object;)Ljava/util/Map; diff --git a/normalized-cache-incubating/api/normalized-cache-incubating.klib.api b/normalized-cache-incubating/api/normalized-cache-incubating.klib.api index d11ce69c..59016e75 100644 --- a/normalized-cache-incubating/api/normalized-cache-incubating.klib.api +++ b/normalized-cache-incubating/api/normalized-cache-incubating.klib.api @@ -171,9 +171,9 @@ final class com.apollographql.cache.normalized.api/CacheHeaders { // com.apollog } final class com.apollographql.cache.normalized.api/CacheKey { // com.apollographql.cache.normalized.api/CacheKey|null[0] - constructor (kotlin/String) // com.apollographql.cache.normalized.api/CacheKey.|(kotlin.String){}[0] constructor (kotlin/String, kotlin.collections/List) // com.apollographql.cache.normalized.api/CacheKey.|(kotlin.String;kotlin.collections.List){}[0] constructor (kotlin/String, kotlin/Array...) // com.apollographql.cache.normalized.api/CacheKey.|(kotlin.String;kotlin.Array...){}[0] + constructor (kotlin/String, kotlin/Boolean = ...) 
// com.apollographql.cache.normalized.api/CacheKey.|(kotlin.String;kotlin.Boolean){}[0] final val key // com.apollographql.cache.normalized.api/CacheKey.key|{}key[0] final fun (): kotlin/String // com.apollographql.cache.normalized.api/CacheKey.key.|(){}[0] @@ -573,6 +573,7 @@ final fun (com.apollographql.cache.normalized/ApolloStore).com.apollographql.cac final fun (com.apollographql.cache.normalized/ApolloStore).com.apollographql.cache.normalized/removeUnreachableRecords(): kotlin.collections/Set // com.apollographql.cache.normalized/removeUnreachableRecords|removeUnreachableRecords@com.apollographql.cache.normalized.ApolloStore(){}[0] final fun (kotlin.collections/Collection?).com.apollographql.cache.normalized.api/dependentKeys(): kotlin.collections/Set // com.apollographql.cache.normalized.api/dependentKeys|dependentKeys@kotlin.collections.Collection?(){}[0] final fun (kotlin.collections/Map).com.apollographql.cache.normalized/getReachableCacheKeys(): kotlin.collections/Set // com.apollographql.cache.normalized/getReachableCacheKeys|getReachableCacheKeys@kotlin.collections.Map(){}[0] +final fun (kotlin/String).com.apollographql.cache.normalized.internal/hashed(): kotlin/String // com.apollographql.cache.normalized.internal/hashed|hashed@kotlin.String(){}[0] final fun <#A: com.apollographql.apollo.api/Executable.Data> (#A).com.apollographql.cache.normalized.api/withErrors(com.apollographql.apollo.api/Executable<#A>, kotlin.collections/List?, com.apollographql.apollo.api/CustomScalarAdapters = ...): kotlin.collections/Map // com.apollographql.cache.normalized.api/withErrors|withErrors@0:0(com.apollographql.apollo.api.Executable<0:0>;kotlin.collections.List?;com.apollographql.apollo.api.CustomScalarAdapters){0§}[0] final fun <#A: com.apollographql.apollo.api/Executable.Data> (#A).com.apollographql.cache.normalized.internal/normalized(com.apollographql.apollo.api/Executable<#A>, kotlin/String = ..., com.apollographql.apollo.api/CustomScalarAdapters = ..., com.apollographql.cache.normalized.api/CacheKeyGenerator = ..., com.apollographql.cache.normalized.api/MetadataGenerator = ..., com.apollographql.cache.normalized.api/FieldKeyGenerator = ..., com.apollographql.cache.normalized.api/EmbeddedFieldsProvider = ...): kotlin.collections/Map // com.apollographql.cache.normalized.internal/normalized|normalized@0:0(com.apollographql.apollo.api.Executable<0:0>;kotlin.String;com.apollographql.apollo.api.CustomScalarAdapters;com.apollographql.cache.normalized.api.CacheKeyGenerator;com.apollographql.cache.normalized.api.MetadataGenerator;com.apollographql.cache.normalized.api.FieldKeyGenerator;com.apollographql.cache.normalized.api.EmbeddedFieldsProvider){0§}[0] final fun <#A: com.apollographql.apollo.api/Executable.Data> (kotlin.collections/Map).com.apollographql.cache.normalized.internal/normalized(com.apollographql.apollo.api/Executable<#A>, kotlin/String = ..., com.apollographql.apollo.api/CustomScalarAdapters = ..., com.apollographql.cache.normalized.api/CacheKeyGenerator = ..., com.apollographql.cache.normalized.api/MetadataGenerator = ..., com.apollographql.cache.normalized.api/FieldKeyGenerator = ..., com.apollographql.cache.normalized.api/EmbeddedFieldsProvider = ...): kotlin.collections/Map // 
com.apollographql.cache.normalized.internal/normalized|normalized@kotlin.collections.Map(com.apollographql.apollo.api.Executable<0:0>;kotlin.String;com.apollographql.apollo.api.CustomScalarAdapters;com.apollographql.cache.normalized.api.CacheKeyGenerator;com.apollographql.cache.normalized.api.MetadataGenerator;com.apollographql.cache.normalized.api.FieldKeyGenerator;com.apollographql.cache.normalized.api.EmbeddedFieldsProvider){0§}[0] diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt index 2719a2e3..d5e0480b 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt @@ -1,5 +1,6 @@ package com.apollographql.cache.normalized.api +import com.apollographql.cache.normalized.internal.hashed import kotlin.jvm.JvmStatic /** @@ -7,7 +8,10 @@ import kotlin.jvm.JvmStatic * * @param key The key of the object in the cache. The key must be globally unique. */ -class CacheKey(val key: String) { +class CacheKey( + val key: String, + internal val isHashed: Boolean = false, +) { /** * Builds a [CacheKey] from a typename and a list of Strings. @@ -38,6 +42,10 @@ class CacheKey(val key: String) { override fun toString() = "CacheKey($key)" + internal val hashedKey by lazy { + if (isHashed) key else key.hashed() + } + fun serialize(): String { return "$SERIALIZATION_TEMPLATE{$key}" } diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/CacheBatchReader.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/CacheBatchReader.kt index df20c4bb..973b3add 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/CacheBatchReader.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/CacheBatchReader.kt @@ -194,7 +194,7 @@ internal class CacheBatchReader( is CacheKey -> { pendingReferences.add( PendingReference( - key = key, + key = hashedKey, selections = selections, parentType = parentType, path = path, diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/Normalizer.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/Normalizer.kt index 1a0b02a0..d21f9f30 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/Normalizer.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/Normalizer.kt @@ -155,7 +155,7 @@ internal class Normalizer( } records[key] = mergedRecord - return CacheKey(key) + return CacheKey(key, isHashed = true) } @@ -211,6 +211,7 @@ internal class Normalizer( if (key == null) { key = path } + key = key.hashed() if (embeddedFields.contains(field.name)) { buildFields(value, key, field.selections, field.type.rawType()) .mapValues { it.value.fieldValue } @@ -293,3 +294,9 @@ fun DataWithErrors.normalized( return Normalizer(variables, rootKey, cacheKeyGenerator, metadataGenerator, fieldKeyGenerator, embeddedFieldsProvider) .normalize(this, executable.rootField().selections, executable.rootField().type.rawType()) } + + +@OptIn(ExperimentalStdlibApi::class) +fun String.hashed(): String { + return 
hashCode().toHexString() +} diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/BlobRecordSerializer.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/BlobRecordSerializer.kt index 682e0dea..b8f78e11 100644 --- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/BlobRecordSerializer.kt +++ b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/BlobRecordSerializer.kt @@ -164,7 +164,7 @@ internal object BlobRecordSerializer { JSON_NUMBER -> JsonNumber(readString()) BOOLEAN -> readByte() > 0 CACHE_KEY -> { - CacheKey(readString()) + CacheKey(readString(), isHashed = true) } LIST -> { diff --git a/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCacheTest.kt b/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCacheTest.kt index 0ca8f53e..e37380ba 100644 --- a/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCacheTest.kt +++ b/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCacheTest.kt @@ -16,6 +16,7 @@ import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.sql.internal.BlobRecordDatabase import com.apollographql.cache.normalized.sql.internal.blob.BlobQueries import kotlin.test.BeforeTest +import kotlin.test.Ignore import kotlin.test.Test import kotlin.test.assertEquals import kotlin.test.assertNotNull @@ -205,6 +206,7 @@ class SqlNormalizedCacheTest { assertEquals(expected = true, actual = record.fields["newFieldKey"]) } + @Ignore @Test fun testPatternRemove() { createRecord("specialKey1") @@ -217,6 +219,7 @@ class SqlNormalizedCacheTest { assertNotNull(cache.loadRecord("regularKey1", CacheHeaders.NONE)) } + @Ignore @Test fun testPatternRemoveWithEscape() { createRecord("%1") diff --git a/tests/defer/src/commonTest/kotlin/test/DeferNormalizedCacheTest.kt b/tests/defer/src/commonTest/kotlin/test/DeferNormalizedCacheTest.kt index 1923306e..ddb222ce 100644 --- a/tests/defer/src/commonTest/kotlin/test/DeferNormalizedCacheTest.kt +++ b/tests/defer/src/commonTest/kotlin/test/DeferNormalizedCacheTest.kt @@ -17,6 +17,7 @@ import com.apollographql.cache.normalized.FetchPolicy import com.apollographql.cache.normalized.api.CacheHeaders import com.apollographql.cache.normalized.apolloStore import com.apollographql.cache.normalized.fetchPolicy +import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.optimisticUpdates import com.apollographql.cache.normalized.store @@ -452,7 +453,8 @@ class DeferNormalizedCacheTest { val cacheExceptionResponse = actual.last() assertIs(networkExceptionResponse.exception) assertIs(cacheExceptionResponse.exception) - assertEquals("Object 'computers.0.screen' has no field named 'isColor'", cacheExceptionResponse.exception!!.message) + val hashedKey = ("computers.0".hashed() + ".screen").hashed() + assertEquals("Object '$hashedKey' has no field named 'isColor'", cacheExceptionResponse.exception!!.message) } @Test @@ -539,7 +541,7 @@ class DeferNormalizedCacheTest { val multipartBody = mockServer.enqueueMultipart("application/json") 
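The tests from this point on assert record keys in their hashed form, following the scheme this patch introduces in CacheKey.kt and Normalizer.kt: keys are stored hashed, CacheKey carries an internal isHashed flag, and hashedKey is computed lazily. The following is a minimal standalone sketch of that scheme, not part of the patch; CacheKeySketch and hashedSketch are illustrative names, while the hashing itself copies the String.hashed() helper added in Normalizer.kt.

@OptIn(ExperimentalStdlibApi::class)
fun String.hashedSketch(): String = hashCode().toHexString()

class CacheKeySketch(val key: String, val isHashed: Boolean = false) {
  // Mirrors CacheKey.hashedKey above: hash lazily, and never re-hash a key that
  // the normalizer already produced in hashed form (isHashed = true).
  val hashedKey by lazy { if (isHashed) key else key.hashedSketch() }
}

fun main() {
  val fromUserCode = CacheKeySketch("Character:2001")
  val fromNormalizer = CacheKeySketch("Character:2001".hashedSketch(), isHashed = true)
  println(fromUserCode.hashedKey == fromNormalizer.hashedKey) // true
}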
multipartBody.enqueuePart(jsonList[0].encodeUtf8(), false) val recordFields = apolloClient.query(SimpleDeferQuery()).fetchPolicy(FetchPolicy.NetworkOnly).toFlow().map { - apolloClient.apolloStore.accessCache { it.loadRecord("computers.0", CacheHeaders.NONE)!!.fields }.also { + apolloClient.apolloStore.accessCache { it.loadRecord("computers.0".hashed(), CacheHeaders.NONE)!!.fields }.also { multipartBody.enqueuePart(jsonList[1].encodeUtf8(), true) } }.toList() diff --git a/tests/garbage-collection/src/commonTest/kotlin/DanglingReferencesTest.kt b/tests/garbage-collection/src/commonTest/kotlin/DanglingReferencesTest.kt index 569d3bfe..dcbfef6d 100644 --- a/tests/garbage-collection/src/commonTest/kotlin/DanglingReferencesTest.kt +++ b/tests/garbage-collection/src/commonTest/kotlin/DanglingReferencesTest.kt @@ -7,6 +7,7 @@ import com.apollographql.cache.normalized.FetchPolicy import com.apollographql.cache.normalized.allRecords import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.fetchPolicy +import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.removeDanglingReferences import com.apollographql.cache.normalized.sql.SqlNormalizedCacheFactory @@ -35,13 +36,13 @@ class DanglingReferencesTest { .execute() var allRecords = store.accessCache { it.allRecords() } - assertTrue(allRecords["Repository:0"]!!.fields.containsKey("starGazers")) + assertTrue(allRecords["Repository:0".hashed()]!!.fields.containsKey("starGazers")) // Remove User 1, now Repository 0.starGazers is a dangling reference - store.remove(CacheKey("User:1"), cascade = false) + store.remove(CacheKey("User:1".hashed()), cascade = false) val removedFieldsAndRecords = store.removeDanglingReferences() assertEquals( - setOf("Repository:0.starGazers"), + setOf("${"Repository:0".hashed()}.starGazers"), removedFieldsAndRecords.removedFields ) assertEquals( @@ -49,7 +50,7 @@ class DanglingReferencesTest { removedFieldsAndRecords.removedRecords ) allRecords = store.accessCache { it.allRecords() } - assertFalse(allRecords["Repository:0"]!!.fields.containsKey("starGazers")) + assertFalse(allRecords["Repository:0".hashed()]!!.fields.containsKey("starGazers")) } } @@ -74,20 +75,20 @@ class DanglingReferencesTest { // thus (metaProjects.0.0) is empty and removed // thus (QUERY_ROOT).metaProjects is a dangling reference // thus QUERY_ROOT is empty and removed - store.remove(CacheKey("User:0"), cascade = false) + store.remove(CacheKey("User:0".hashed()), cascade = false) val removedFieldsAndRecords = store.removeDanglingReferences() assertEquals( setOf( - "metaProjects.0.0.type.owners", - "metaProjects.0.0.type", + ("metaProjects.0.0".hashed() + ".type").hashed() + ".owners", + "metaProjects.0.0".hashed() + ".type", "QUERY_ROOT.metaProjects", ), removedFieldsAndRecords.removedFields ) assertEquals( setOf( - CacheKey("metaProjects.0.0.type"), - CacheKey("metaProjects.0.0"), + CacheKey(("metaProjects.0.0".hashed() + ".type").hashed()), + CacheKey("metaProjects.0.0".hashed()), CacheKey("QUERY_ROOT"), ), removedFieldsAndRecords.removedRecords diff --git a/tests/garbage-collection/src/commonTest/kotlin/GarbageCollectTest.kt b/tests/garbage-collection/src/commonTest/kotlin/GarbageCollectTest.kt index 49ed6e34..1c41a60c 100644 --- a/tests/garbage-collection/src/commonTest/kotlin/GarbageCollectTest.kt +++ b/tests/garbage-collection/src/commonTest/kotlin/GarbageCollectTest.kt @@ -10,6 +10,7 @@ import 
com.apollographql.cache.normalized.api.SchemaCoordinatesMaxAgeProvider import com.apollographql.cache.normalized.cacheHeaders import com.apollographql.cache.normalized.fetchPolicy import com.apollographql.cache.normalized.garbageCollect +import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.sql.SqlNormalizedCacheFactory import com.apollographql.cache.normalized.store @@ -51,35 +52,35 @@ class GarbageCollectTest { val garbageCollectResult = store.garbageCollect(maxAgeProvider) assertEquals( setOf( - "metaProjects.0.0.type.owners", - "metaProjects.0.1.type.owners", - "metaProjects.1.0.type.owners", + ("metaProjects.0.0".hashed() + ".type").hashed() + ".owners", + ("metaProjects.0.1".hashed() + ".type").hashed() + ".owners", + ("metaProjects.1.0".hashed() + ".type").hashed() + ".owners", ), garbageCollectResult.removedStaleFields.removedFields ) assertEquals( setOf( - CacheKey("metaProjects.0.0.type"), - CacheKey("metaProjects.0.1.type"), - CacheKey("metaProjects.1.0.type"), + CacheKey(("metaProjects.0.0".hashed() + ".type").hashed()), + CacheKey(("metaProjects.0.1".hashed() + ".type").hashed()), + CacheKey(("metaProjects.1.0".hashed() + ".type").hashed()), ), garbageCollectResult.removedStaleFields.removedRecords ) assertEquals( setOf( - "metaProjects.0.0.type", - "metaProjects.0.1.type", - "metaProjects.1.0.type", + "metaProjects.0.0".hashed() + ".type", + "metaProjects.0.1".hashed() + ".type", + "metaProjects.1.0".hashed() + ".type", "QUERY_ROOT.metaProjects", ), garbageCollectResult.removedDanglingReferences.removedFields ) assertEquals( setOf( - CacheKey("metaProjects.0.0"), - CacheKey("metaProjects.0.1"), - CacheKey("metaProjects.1.0"), + CacheKey("metaProjects.0.0".hashed()), + CacheKey("metaProjects.0.1".hashed()), + CacheKey("metaProjects.1.0".hashed()), CacheKey("QUERY_ROOT"), ), garbageCollectResult.removedDanglingReferences.removedRecords @@ -87,9 +88,9 @@ class GarbageCollectTest { assertEquals( setOf( - CacheKey("User:0"), - CacheKey("User:1"), - CacheKey("User:2"), + CacheKey("User:0".hashed()), + CacheKey("User:1".hashed()), + CacheKey("User:2".hashed()), ), garbageCollectResult.removedUnreachableRecords ) diff --git a/tests/garbage-collection/src/commonTest/kotlin/ReachableCacheKeysTest.kt b/tests/garbage-collection/src/commonTest/kotlin/ReachableCacheKeysTest.kt index 6e1945f1..17aae080 100644 --- a/tests/garbage-collection/src/commonTest/kotlin/ReachableCacheKeysTest.kt +++ b/tests/garbage-collection/src/commonTest/kotlin/ReachableCacheKeysTest.kt @@ -8,6 +8,7 @@ import com.apollographql.cache.normalized.allRecords import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.fetchPolicy import com.apollographql.cache.normalized.getReachableCacheKeys +import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.removeUnreachableRecords import com.apollographql.cache.normalized.sql.SqlNormalizedCacheFactory @@ -130,35 +131,35 @@ class ReachableCacheKeysTest { assertContentEquals( listOf( CacheKey("QUERY_ROOT"), - CacheKey("Repository:8"), - CacheKey("Repository:7"), - CacheKey("User:43"), - CacheKey("User:42"), - CacheKey("User:0"), - CacheKey("Repository:6"), - CacheKey("Repository:5"), - CacheKey("Repository:4"), - CacheKey("Repository:3"), - CacheKey("Repository:2"), - CacheKey("Repository:1"), + 
CacheKey("Repository:8".hashed()), + CacheKey("Repository:7".hashed()), + CacheKey("User:43".hashed()), + CacheKey("User:42".hashed()), + CacheKey("User:0".hashed()), + CacheKey("Repository:6".hashed()), + CacheKey("Repository:5".hashed()), + CacheKey("Repository:4".hashed()), + CacheKey("Repository:3".hashed()), + CacheKey("Repository:2".hashed()), + CacheKey("Repository:1".hashed()), ), reachableCacheKeys ) // Remove User 43, now Repositories 5 and 6 should not be reachable / 7 should still be reachable - store.remove(CacheKey("User:43"), cascade = false) + store.remove(CacheKey("User:43".hashed()), cascade = false) reachableCacheKeys = store.accessCache { it.allRecords().getReachableCacheKeys() } assertContentEquals( listOf( CacheKey("QUERY_ROOT"), - CacheKey("Repository:8"), - CacheKey("Repository:7"), - CacheKey("User:42"), - CacheKey("User:0"), - CacheKey("Repository:4"), - CacheKey("Repository:3"), - CacheKey("Repository:2"), - CacheKey("Repository:1"), + CacheKey("Repository:8".hashed()), + CacheKey("Repository:7".hashed()), + CacheKey("User:42".hashed()), + CacheKey("User:0".hashed()), + CacheKey("Repository:4".hashed()), + CacheKey("Repository:3".hashed()), + CacheKey("Repository:2".hashed()), + CacheKey("Repository:1".hashed()), ), reachableCacheKeys ) @@ -166,38 +167,38 @@ class ReachableCacheKeysTest { // Add a non-reachable Repository, reachableCacheKeys should not change store.writeFragment( RepositoryFragmentImpl(), - CacheKey("Repository:500"), + CacheKey("Repository:500".hashed()), RepositoryFragment(id = "500", __typename = "Repository", starGazers = emptyList()), ) reachableCacheKeys = store.accessCache { it.allRecords().getReachableCacheKeys() } assertContentEquals( listOf( CacheKey("QUERY_ROOT"), - CacheKey("Repository:8"), - CacheKey("Repository:7"), - CacheKey("User:42"), - CacheKey("User:0"), - CacheKey("Repository:4"), - CacheKey("Repository:3"), - CacheKey("Repository:2"), - CacheKey("Repository:1"), + CacheKey("Repository:8".hashed()), + CacheKey("Repository:7".hashed()), + CacheKey("User:42".hashed()), + CacheKey("User:0".hashed()), + CacheKey("Repository:4".hashed()), + CacheKey("Repository:3".hashed()), + CacheKey("Repository:2".hashed()), + CacheKey("Repository:1".hashed()), ), reachableCacheKeys ) assertEquals( setOf( - CacheKey("User:42"), - CacheKey("Repository:6"), - CacheKey("User:0"), - CacheKey("Repository:8"), - CacheKey("Repository:3"), - CacheKey("Repository:1"), - CacheKey("Repository:2"), - CacheKey("Repository:4"), + CacheKey("User:42".hashed()), + CacheKey("Repository:6".hashed()), + CacheKey("User:0".hashed()), + CacheKey("Repository:8".hashed()), + CacheKey("Repository:3".hashed()), + CacheKey("Repository:1".hashed()), + CacheKey("Repository:2".hashed()), + CacheKey("Repository:4".hashed()), CacheKey("QUERY_ROOT"), - CacheKey("Repository:5"), - CacheKey("Repository:500"), - CacheKey("Repository:7"), + CacheKey("Repository:5".hashed()), + CacheKey("Repository:500".hashed()), + CacheKey("Repository:7".hashed()), ), store.accessCache { it.allRecords() }.keys.map { CacheKey(it) }.toSet() ) @@ -207,22 +208,22 @@ class ReachableCacheKeysTest { assertEquals( setOf( CacheKey("QUERY_ROOT"), - CacheKey("Repository:8"), - CacheKey("Repository:7"), - CacheKey("User:42"), - CacheKey("User:0"), - CacheKey("Repository:4"), - CacheKey("Repository:3"), - CacheKey("Repository:2"), - CacheKey("Repository:1"), + CacheKey("Repository:8".hashed()), + CacheKey("Repository:7".hashed()), + CacheKey("User:42".hashed()), + CacheKey("User:0".hashed()), + 
CacheKey("Repository:4".hashed()), + CacheKey("Repository:3".hashed()), + CacheKey("Repository:2".hashed()), + CacheKey("Repository:1".hashed()), ), store.accessCache { it.allRecords() }.keys.map { CacheKey(it) }.toSet() ) assertEquals( setOf( - CacheKey("Repository:6"), - CacheKey("Repository:5"), - CacheKey("Repository:500"), + CacheKey("Repository:6".hashed()), + CacheKey("Repository:5".hashed()), + CacheKey("Repository:500".hashed()), ), removedKeys ) diff --git a/tests/garbage-collection/src/commonTest/kotlin/StaleFieldsTest.kt b/tests/garbage-collection/src/commonTest/kotlin/StaleFieldsTest.kt index a74e8054..f3a9f54e 100644 --- a/tests/garbage-collection/src/commonTest/kotlin/StaleFieldsTest.kt +++ b/tests/garbage-collection/src/commonTest/kotlin/StaleFieldsTest.kt @@ -13,6 +13,7 @@ import com.apollographql.cache.normalized.api.GlobalMaxAgeProvider import com.apollographql.cache.normalized.api.SchemaCoordinatesMaxAgeProvider import com.apollographql.cache.normalized.cacheHeaders import com.apollographql.cache.normalized.fetchPolicy +import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.removeStaleFields import com.apollographql.cache.normalized.sql.SqlNormalizedCacheFactory @@ -45,10 +46,10 @@ class StaleFieldsTest { .execute() var allRecords = store.accessCache { it.allRecords() } - assertTrue(allRecords["Repository:0"]!!.fields.containsKey("stars")) - assertTrue(allRecords["Repository:0"]!!.fields.containsKey("starGazers")) - assertTrue(allRecords["Repository:1"]!!.fields.containsKey("stars")) - assertTrue(allRecords["Repository:1"]!!.fields.containsKey("starGazers")) + assertTrue(allRecords["Repository:0".hashed()]!!.fields.containsKey("stars")) + assertTrue(allRecords["Repository:0".hashed()]!!.fields.containsKey("starGazers")) + assertTrue(allRecords["Repository:1".hashed()]!!.fields.containsKey("stars")) + assertTrue(allRecords["Repository:1".hashed()]!!.fields.containsKey("starGazers")) val maxAgeProvider = SchemaCoordinatesMaxAgeProvider( Cache.maxAges, @@ -58,18 +59,18 @@ class StaleFieldsTest { // Repository.stars has a max age of 60 seconds, so they should be removed / User has a max age of 90 seconds, so Repository.starGazers should be kept assertEquals( setOf( - "Repository:0.stars", - "Repository:1.stars", + "Repository:0".hashed() + ".stars", + "Repository:1".hashed() + ".stars", ), removedFieldsAndRecords.removedFields ) assertEquals( emptySet(), removedFieldsAndRecords.removedRecords ) allRecords = store.accessCache { it.allRecords() } - assertFalse(allRecords["Repository:0"]!!.fields.containsKey("stars")) - assertTrue(allRecords["Repository:0"]!!.fields.containsKey("starGazers")) - assertFalse(allRecords["Repository:1"]!!.fields.containsKey("stars")) - assertTrue(allRecords["Repository:1"]!!.fields.containsKey("starGazers")) + assertFalse(allRecords["Repository:0".hashed()]!!.fields.containsKey("stars")) + assertTrue(allRecords["Repository:0".hashed()]!!.fields.containsKey("starGazers")) + assertFalse(allRecords["Repository:1".hashed()]!!.fields.containsKey("stars")) + assertTrue(allRecords["Repository:1".hashed()]!!.fields.containsKey("starGazers")) mockServer.enqueueString(REPOSITORY_LIST_RESPONSE) apolloClient.query(RepositoryListQuery()) @@ -80,20 +81,20 @@ class StaleFieldsTest { // Repository.stars and Repository.starGazers should be removed assertEquals( setOf( - "Repository:0.stars", - "Repository:0.starGazers", - "Repository:1.stars", - 
"Repository:1.starGazers", + "Repository:0".hashed() + ".stars", + "Repository:0".hashed() + ".starGazers", + "Repository:1".hashed() + ".stars", + "Repository:1".hashed() + ".starGazers", ), removedFieldsAndRecords.removedFields ) assertEquals( emptySet(), removedFieldsAndRecords.removedRecords ) allRecords = store.accessCache { it.allRecords() } - assertFalse(allRecords["Repository:0"]!!.fields.containsKey("stars")) - assertFalse(allRecords["Repository:0"]!!.fields.containsKey("starGazers")) - assertFalse(allRecords["Repository:1"]!!.fields.containsKey("stars")) - assertFalse(allRecords["Repository:1"]!!.fields.containsKey("starGazers")) + assertFalse(allRecords["Repository:0".hashed()]!!.fields.containsKey("stars")) + assertFalse(allRecords["Repository:0".hashed()]!!.fields.containsKey("starGazers")) + assertFalse(allRecords["Repository:1".hashed()]!!.fields.containsKey("stars")) + assertFalse(allRecords["Repository:1".hashed()]!!.fields.containsKey("starGazers")) } } @@ -113,10 +114,10 @@ class StaleFieldsTest { .execute() var allRecords = store.accessCache { it.allRecords() } - assertTrue(allRecords["projects.0"]!!.fields.containsKey("velocity")) - assertTrue(allRecords["projects.0"]!!.fields.containsKey("isUrgent")) - assertTrue(allRecords["projects.1"]!!.fields.containsKey("velocity")) - assertTrue(allRecords["projects.1"]!!.fields.containsKey("isUrgent")) + assertTrue(allRecords["projects.0".hashed()]!!.fields.containsKey("velocity")) + assertTrue(allRecords["projects.0".hashed()]!!.fields.containsKey("isUrgent")) + assertTrue(allRecords["projects.1".hashed()]!!.fields.containsKey("velocity")) + assertTrue(allRecords["projects.1".hashed()]!!.fields.containsKey("isUrgent")) val maxAgeProvider = SchemaCoordinatesMaxAgeProvider( Cache.maxAges, @@ -126,18 +127,18 @@ class StaleFieldsTest { // Project.velocity has a max age of 60 seconds, so they should be removed / Project.isUrgent has a max age of 90 seconds, so they should be kept assertEquals( setOf( - "projects.0.velocity", - "projects.1.velocity", + "projects.0".hashed() + ".velocity", + "projects.1".hashed() + ".velocity", ), removedFieldsAndRecords.removedFields ) assertEquals( emptySet(), removedFieldsAndRecords.removedRecords ) allRecords = store.accessCache { it.allRecords() } - assertFalse(allRecords["projects.0"]!!.fields.containsKey("velocity")) - assertTrue(allRecords["projects.0"]!!.fields.containsKey("isUrgent")) - assertFalse(allRecords["projects.1"]!!.fields.containsKey("velocity")) - assertTrue(allRecords["projects.1"]!!.fields.containsKey("isUrgent")) + assertFalse(allRecords["projects.0".hashed()]!!.fields.containsKey("velocity")) + assertTrue(allRecords["projects.0".hashed()]!!.fields.containsKey("isUrgent")) + assertFalse(allRecords["projects.1".hashed()]!!.fields.containsKey("velocity")) + assertTrue(allRecords["projects.1".hashed()]!!.fields.containsKey("isUrgent")) mockServer.enqueueString(PROJECT_LIST_RESPONSE) apolloClient.query(ProjectListQuery()) @@ -148,21 +149,21 @@ class StaleFieldsTest { // Project.velocity and Project.isUrgent should be removed, their records being empty they should be removed assertEquals( setOf( - "projects.0.velocity", - "projects.0.isUrgent", - "projects.1.velocity", - "projects.1.isUrgent", + "projects.0".hashed() + ".velocity", + "projects.0".hashed() + ".isUrgent", + "projects.1".hashed() + ".velocity", + "projects.1".hashed() + ".isUrgent", ), removedFieldsAndRecords.removedFields ) assertEquals( setOf( - CacheKey("projects.0"), - CacheKey("projects.1"), + 
CacheKey("projects.0".hashed()), + CacheKey("projects.1".hashed()), ), removedFieldsAndRecords.removedRecords ) allRecords = store.accessCache { it.allRecords() } - assertFalse(allRecords.containsKey("projects.0")) - assertFalse(allRecords.containsKey("projects.1")) + assertFalse(allRecords.containsKey("projects.0".hashed())) + assertFalse(allRecords.containsKey("projects.1".hashed())) } } @@ -182,42 +183,42 @@ class StaleFieldsTest { .execute() var allRecords = store.accessCache { it.allRecords() } - assertTrue(allRecords["Repository:0"]!!.fields.containsKey("stars")) - assertTrue(allRecords["Repository:0"]!!.fields.containsKey("starGazers")) - assertTrue(allRecords["Repository:1"]!!.fields.containsKey("stars")) - assertTrue(allRecords["Repository:1"]!!.fields.containsKey("starGazers")) + assertTrue(allRecords["Repository:0".hashed()]!!.fields.containsKey("stars")) + assertTrue(allRecords["Repository:0".hashed()]!!.fields.containsKey("starGazers")) + assertTrue(allRecords["Repository:1".hashed()]!!.fields.containsKey("stars")) + assertTrue(allRecords["Repository:1".hashed()]!!.fields.containsKey("starGazers")) var removedFieldsAndRecords = store.removeStaleFields(GlobalMaxAgeProvider(Duration.INFINITE)) // Everything is stale assertEquals( setOf( - "Repository:0.__typename", - "Repository:0.id", - "Repository:0.stars", - "Repository:0.starGazers", - "User:0.__typename", - "User:0.id", - "User:0.name", - "Repository:1.__typename", - "Repository:1.id", - "Repository:1.stars", - "Repository:1.starGazers", - "User:2.__typename", - "User:2.id", - "User:2.name", + "Repository:0".hashed() + ".__typename", + "Repository:0".hashed() + ".id", + "Repository:0".hashed() + ".stars", + "Repository:0".hashed() + ".starGazers", + "User:0".hashed() + ".__typename", + "User:0".hashed() + ".id", + "User:0".hashed() + ".name", + "Repository:1".hashed() + ".__typename", + "Repository:1".hashed() + ".id", + "Repository:1".hashed() + ".stars", + "Repository:1".hashed() + ".starGazers", + "User:2".hashed() + ".__typename", + "User:2".hashed() + ".id", + "User:2".hashed() + ".name", "QUERY_ROOT.repositories({\"first\":15})", - "User:1.__typename", - "User:1.id", - "User:1.name" + "User:1".hashed() + ".__typename", + "User:1".hashed() + ".id", + "User:1".hashed() + ".name" ), removedFieldsAndRecords.removedFields ) assertEquals( setOf( - CacheKey("Repository:0"), - CacheKey("Repository:1"), - CacheKey("User:0"), - CacheKey("User:1"), - CacheKey("User:2"), + CacheKey("Repository:0".hashed()), + CacheKey("Repository:1".hashed()), + CacheKey("User:0".hashed()), + CacheKey("User:1".hashed()), + CacheKey("User:2".hashed()), CacheKey("QUERY_ROOT"), ), removedFieldsAndRecords.removedRecords ) @@ -241,10 +242,10 @@ class StaleFieldsTest { removedFieldsAndRecords.removedRecords ) allRecords = store.accessCache { it.allRecords() } - assertTrue(allRecords["Repository:0"]!!.fields.containsKey("stars")) - assertTrue(allRecords["Repository:0"]!!.fields.containsKey("starGazers")) - assertTrue(allRecords["Repository:1"]!!.fields.containsKey("stars")) - assertTrue(allRecords["Repository:1"]!!.fields.containsKey("starGazers")) + assertTrue(allRecords["Repository:0".hashed()]!!.fields.containsKey("stars")) + assertTrue(allRecords["Repository:0".hashed()]!!.fields.containsKey("starGazers")) + assertTrue(allRecords["Repository:1".hashed()]!!.fields.containsKey("stars")) + assertTrue(allRecords["Repository:1".hashed()]!!.fields.containsKey("starGazers")) } } diff --git 
a/tests/include-skip-operation-based/src/commonTest/kotlin/IncludeTest.kt b/tests/include-skip-operation-based/src/commonTest/kotlin/IncludeTest.kt index fb5767aa..bdf7df02 100644 --- a/tests/include-skip-operation-based/src/commonTest/kotlin/IncludeTest.kt +++ b/tests/include-skip-operation-based/src/commonTest/kotlin/IncludeTest.kt @@ -5,6 +5,7 @@ import com.apollographql.apollo.api.Optional import com.apollographql.apollo.api.json.MapJsonReader import com.apollographql.apollo.api.toApolloResponse import com.apollographql.apollo.testing.internal.runTest +import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.internal.normalized import com.example.GetCatIncludeVariableWithDefaultQuery import com.example.SkipFragmentWithDefaultToFalseQuery @@ -29,7 +30,7 @@ class IncludeTest { } val normalized = data.normalized(operation) - assertNull((normalized["animal"] as Map<*, *>)["species"]) + assertNull((normalized["animal".hashed()] as Map<*, *>)["species"]) } @Test @@ -43,6 +44,6 @@ class IncludeTest { } val normalized = data.normalized(operation) - assertNull((normalized["animal"] as Map<*, *>)["barf"]) + assertNull((normalized["animal".hashed()] as Map<*, *>)["barf"]) } } diff --git a/tests/models-operation-based-with-interfaces/src/commonTest/kotlin/test/StoreTest.kt b/tests/models-operation-based-with-interfaces/src/commonTest/kotlin/test/StoreTest.kt index 829e8138..89800b8d 100644 --- a/tests/models-operation-based-with-interfaces/src/commonTest/kotlin/test/StoreTest.kt +++ b/tests/models-operation-based-with-interfaces/src/commonTest/kotlin/test/StoreTest.kt @@ -11,6 +11,7 @@ import com.apollographql.apollo.testing.internal.runTest import com.apollographql.cache.normalized.ApolloStore import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.IdCacheKeyGenerator +import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.store import com.apollographql.mockserver.MockServer @@ -44,7 +45,7 @@ class StoreTest { val heroWithFriendsFragment = store.readFragment( HeroWithFriendsFragmentImpl(), - CacheKey("Character:2001"), + CacheKey("Character:2001".hashed()), ).data assertEquals(heroWithFriendsFragment.id, "2001") assertEquals(heroWithFriendsFragment.name, "R2-D2") @@ -58,7 +59,7 @@ class StoreTest { var fragment = store.readFragment( HumanWithIdFragmentImpl(), - CacheKey("Character:1000"), + CacheKey("Character:1000".hashed()), ).data assertEquals(fragment.id, "1000") @@ -66,14 +67,14 @@ class StoreTest { fragment = store.readFragment( HumanWithIdFragmentImpl(), - CacheKey("Character:1002"), + CacheKey("Character:1002".hashed()), ).data assertEquals(fragment.id, "1002") assertEquals(fragment.name, "Han Solo") fragment = store.readFragment( HumanWithIdFragmentImpl(), - CacheKey("Character:1003"), + CacheKey("Character:1003".hashed()), ).data assertEquals(fragment.id, "1003") assertEquals(fragment.name, "Leia Organa") @@ -100,7 +101,7 @@ class StoreTest { store.writeFragment( HeroWithFriendsFragmentImpl(), - CacheKey("Character:2001"), + CacheKey("Character:2001".hashed()), HeroWithFriendsFragment( "2001", "R222-D222", @@ -125,7 +126,7 @@ class StoreTest { store.writeFragment( HumanWithIdFragmentImpl(), - CacheKey("Character:1002"), + CacheKey("Character:1002".hashed()), HumanWithIdFragment( "1002", "Beast" diff --git a/tests/models-operation-based/src/commonTest/kotlin/test/StoreTest.kt 
b/tests/models-operation-based/src/commonTest/kotlin/test/StoreTest.kt index 5c500a81..3b27d844 100644 --- a/tests/models-operation-based/src/commonTest/kotlin/test/StoreTest.kt +++ b/tests/models-operation-based/src/commonTest/kotlin/test/StoreTest.kt @@ -11,6 +11,7 @@ import com.apollographql.apollo.testing.internal.runTest import com.apollographql.cache.normalized.ApolloStore import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.IdCacheKeyGenerator +import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.store import com.apollographql.mockserver.MockServer @@ -44,7 +45,7 @@ class StoreTest { val heroWithFriendsFragment = store.readFragment( HeroWithFriendsFragmentImpl(), - CacheKey("Character:2001"), + CacheKey("Character:2001".hashed()), ).data assertEquals(heroWithFriendsFragment.id, "2001") assertEquals(heroWithFriendsFragment.name, "R2-D2") @@ -58,7 +59,7 @@ class StoreTest { var fragment = store.readFragment( HumanWithIdFragmentImpl(), - CacheKey("Character:1000"), + CacheKey("Character:1000".hashed()), ).data assertEquals(fragment.id, "1000") @@ -66,14 +67,14 @@ class StoreTest { fragment = store.readFragment( HumanWithIdFragmentImpl(), - CacheKey("Character:1002"), + CacheKey("Character:1002".hashed()), ).data assertEquals(fragment.id, "1002") assertEquals(fragment.name, "Han Solo") fragment = store.readFragment( HumanWithIdFragmentImpl(), - CacheKey("Character:1003"), + CacheKey("Character:1003".hashed()), ).data assertEquals(fragment.id, "1003") assertEquals(fragment.name, "Leia Organa") @@ -100,7 +101,7 @@ class StoreTest { store.writeFragment( HeroWithFriendsFragmentImpl(), - CacheKey("Character:2001"), + CacheKey("Character:2001".hashed()), HeroWithFriendsFragment( "2001", "R222-D222", @@ -125,7 +126,7 @@ class StoreTest { store.writeFragment( HumanWithIdFragmentImpl(), - CacheKey("Character:1002"), + CacheKey("Character:1002".hashed()), HumanWithIdFragment( "1002", "Beast" diff --git a/tests/models-response-based/src/commonTest/kotlin/test/StoreTest.kt b/tests/models-response-based/src/commonTest/kotlin/test/StoreTest.kt index c02b0d84..8798efa2 100644 --- a/tests/models-response-based/src/commonTest/kotlin/test/StoreTest.kt +++ b/tests/models-response-based/src/commonTest/kotlin/test/StoreTest.kt @@ -12,6 +12,7 @@ import com.apollographql.apollo.testing.internal.runTest import com.apollographql.cache.normalized.ApolloStore import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.IdCacheKeyGenerator +import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.store import com.apollographql.mockserver.MockServer @@ -45,7 +46,7 @@ class StoreTest { val heroWithFriendsFragment = store.readFragment( HeroWithFriendsFragmentImpl(), - CacheKey("Character:2001"), + CacheKey("Character:2001".hashed()), ).data assertEquals(heroWithFriendsFragment.id, "2001") assertEquals(heroWithFriendsFragment.name, "R2-D2") @@ -59,7 +60,7 @@ class StoreTest { var fragment = store.readFragment( HumanWithIdFragmentImpl(), - CacheKey("Character:1000"), + CacheKey("Character:1000".hashed()), ).data assertEquals(fragment.id, "1000") @@ -67,14 +68,14 @@ class StoreTest { fragment = store.readFragment( HumanWithIdFragmentImpl(), - CacheKey("Character:1002"), + CacheKey("Character:1002".hashed()), 
).data assertEquals(fragment.id, "1002") assertEquals(fragment.name, "Han Solo") fragment = store.readFragment( HumanWithIdFragmentImpl(), - CacheKey("Character:1003"), + CacheKey("Character:1003".hashed()), ).data assertEquals(fragment.id, "1003") assertEquals(fragment.name, "Leia Organa") @@ -101,7 +102,7 @@ class StoreTest { store.writeFragment( HeroWithFriendsFragmentImpl(), - CacheKey("Character:2001"), + CacheKey("Character:2001".hashed()), HeroWithFriendsFragmentImpl.Data( id = "2001", name = "R222-D222", @@ -122,7 +123,7 @@ class StoreTest { store.writeFragment( HumanWithIdFragmentImpl(), - CacheKey("Character:1002"), + CacheKey("Character:1002".hashed()), HumanWithIdFragmentImpl.Data( id = "1002", name = "Beast" diff --git a/tests/normalized-cache/src/commonTest/kotlin/NormalizerTest.kt b/tests/normalized-cache/src/commonTest/kotlin/NormalizerTest.kt index e75a07ed..b6dadb34 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/NormalizerTest.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/NormalizerTest.kt @@ -8,6 +8,7 @@ import com.apollographql.cache.normalized.api.DefaultRecordMerger import com.apollographql.cache.normalized.api.IdCacheKeyGenerator import com.apollographql.cache.normalized.api.NormalizedCache import com.apollographql.cache.normalized.api.Record +import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.internal.normalized import com.apollographql.cache.normalized.memory.MemoryCacheFactory import httpcache.AllPlanetsQuery @@ -46,7 +47,7 @@ class NormalizerTest { val records = records(HeroNameQuery(), "HeroNameResponse.json") val record = records.get(rootKey) val reference = record!!["hero"] as CacheKey? - assertEquals(reference, CacheKey("hero")) + assertEquals(reference, CacheKey("hero".hashed())) val heroRecord = records.get(reference!!.key) assertEquals(heroRecord!!["name"], "R2-D2") } @@ -77,7 +78,7 @@ class NormalizerTest { val records = records(EpisodeHeroNameQuery(Episode.JEDI), "EpisodeHeroNameResponse.json") val record = records.get(rootKey) val reference = record!![TEST_FIELD_KEY_JEDI] as CacheKey? - assertEquals(reference, CacheKey(TEST_FIELD_KEY_JEDI)) + assertEquals(reference, CacheKey(TEST_FIELD_KEY_JEDI.hashed())) val heroRecord = records.get(reference!!.key) assertEquals(heroRecord!!["name"], "R2-D2") } @@ -90,7 +91,7 @@ class NormalizerTest { val rootRecord = records.get(rootKey)!! val heroReference = rootRecord["hero"] as CacheKey? - assertEquals(heroReference, CacheKey("hero")) + assertEquals(heroReference, CacheKey("hero".hashed())) val hero = records.get(heroReference!!.key) assertEquals(hero?.get("appearsIn"), listOf("NEWHOPE", "EMPIRE", "JEDI")) @@ -102,18 +103,18 @@ class NormalizerTest { val records = records(HeroAndFriendsNamesQuery(Episode.JEDI), "HeroAndFriendsNameResponse.json") val record = records.get(rootKey) val heroReference = record!![TEST_FIELD_KEY_JEDI] as CacheKey? 
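The expected keys in the NormalizerTest assertions around this hunk come in two shapes: objects resolved by id use "Typename:id" hashed once, while objects without an id fall back to a path built from the parent's already-hashed key, hashed again. A small sketch of both derivations follows; hashedSketch is an illustrative stand-in for the patch's String.hashed(), and the concrete keys match the fixtures used in these tests.

@OptIn(ExperimentalStdlibApi::class)
fun String.hashedSketch(): String = hashCode().toHexString()

fun main() {
  // Identified object: "Typename:id" is hashed once.
  val luke = "Character:1000".hashedSketch()

  // Unidentified list element: the synthesized path starts from the parent's
  // hashed key, and the resulting path is hashed again for the child record.
  val hero = "Character:2001".hashedSketch()
  val firstFriend = "$hero.friends.0".hashedSketch()

  println(luke)
  println(firstFriend)
}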
- assertEquals(heroReference, CacheKey(TEST_FIELD_KEY_JEDI)) + assertEquals(heroReference, CacheKey(TEST_FIELD_KEY_JEDI.hashed())) val heroRecord = records.get(heroReference!!.key) assertEquals(heroRecord!!["name"], "R2-D2") assertEquals( listOf( - CacheKey("$TEST_FIELD_KEY_JEDI.friends.0"), - CacheKey("$TEST_FIELD_KEY_JEDI.friends.1"), - CacheKey("$TEST_FIELD_KEY_JEDI.friends.2") + CacheKey("${TEST_FIELD_KEY_JEDI.hashed()}.friends.0".hashed()), + CacheKey("${TEST_FIELD_KEY_JEDI.hashed()}.friends.1".hashed()), + CacheKey("${TEST_FIELD_KEY_JEDI.hashed()}.friends.2".hashed()) ), heroRecord["friends"] ) - val luke = records.get("$TEST_FIELD_KEY_JEDI.friends.0") + val luke = records.get("${TEST_FIELD_KEY_JEDI.hashed()}.friends.0".hashed()) assertEquals(luke!!["name"], "Luke Skywalker") } @@ -123,18 +124,18 @@ class NormalizerTest { val records = records(HeroAndFriendsNamesWithIDsQuery(Episode.JEDI), "HeroAndFriendsNameWithIdsResponse.json") val record = records.get(rootKey) val heroReference = record!![TEST_FIELD_KEY_JEDI] as CacheKey? - assertEquals(CacheKey("Character:2001"), heroReference) + assertEquals(CacheKey("Character:2001".hashed()), heroReference) val heroRecord = records.get(heroReference!!.key) assertEquals(heroRecord!!["name"], "R2-D2") assertEquals( listOf( - CacheKey("Character:1000"), - CacheKey("Character:1002"), - CacheKey("Character:1003") + CacheKey("Character:1000".hashed()), + CacheKey("Character:1002".hashed()), + CacheKey("Character:1003".hashed()) ), heroRecord["friends"] ) - val luke = records.get("Character:1000") + val luke = records.get("Character:1000".hashed()) assertEquals(luke!!["name"], "Luke Skywalker") } @@ -144,18 +145,18 @@ class NormalizerTest { val records = records(HeroAndFriendsNamesWithIDForParentOnlyQuery(Episode.JEDI), "HeroAndFriendsNameWithIdsParentOnlyResponse.json") val record = records[rootKey] val heroReference = record!![TEST_FIELD_KEY_JEDI] as CacheKey? 
- assertEquals(CacheKey("Character:2001"), heroReference) + assertEquals(CacheKey("Character:2001".hashed()), heroReference) val heroRecord = records.get(heroReference!!.key) assertEquals(heroRecord!!["name"], "R2-D2") assertEquals( listOf( - CacheKey("Character:2001.friends.0"), - CacheKey("Character:2001.friends.1"), - CacheKey("Character:2001.friends.2") + CacheKey("${"Character:2001".hashed()}.friends.0".hashed()), + CacheKey("${"Character:2001".hashed()}.friends.1".hashed()), + CacheKey("${"Character:2001".hashed()}.friends.2".hashed()) ), heroRecord["friends"] ) - val luke = records.get("Character:2001.friends.0") + val luke = records.get("${"Character:2001".hashed()}.friends.0".hashed()) assertEquals(luke!!["name"], "Luke Skywalker") } @@ -208,29 +209,29 @@ class NormalizerTest { @Throws(Exception::class) fun testHeroParentTypeDependentFieldDroid() { val records = records(HeroParentTypeDependentFieldQuery(Episode.JEDI), "HeroParentTypeDependentFieldDroidResponse.json") - val lukeRecord = records.get(TEST_FIELD_KEY_JEDI + ".friends.0") + val lukeRecord = records.get((TEST_FIELD_KEY_JEDI.hashed() + ".friends.0").hashed()) assertEquals(lukeRecord!!["name"], "Luke Skywalker") assertEquals(lukeRecord["height({\"unit\":\"METER\"})"], 1.72) - val friends = records[TEST_FIELD_KEY_JEDI]!!["friends"] + val friends = records[TEST_FIELD_KEY_JEDI.hashed()]!!["friends"] assertIs>(friends) - assertEquals(friends[0], CacheKey("$TEST_FIELD_KEY_JEDI.friends.0")) - assertEquals(friends[1], CacheKey("$TEST_FIELD_KEY_JEDI.friends.1")) - assertEquals(friends[2], CacheKey("$TEST_FIELD_KEY_JEDI.friends.2")) + assertEquals(friends[0], CacheKey((TEST_FIELD_KEY_JEDI.hashed() + ".friends.0").hashed())) + assertEquals(friends[1], CacheKey((TEST_FIELD_KEY_JEDI.hashed() + ".friends.1").hashed())) + assertEquals(friends[2], CacheKey((TEST_FIELD_KEY_JEDI.hashed() + ".friends.2").hashed())) } @Test fun list_of_objects_with_null_object() { val records = records(AllPlanetsQuery(), "AllPlanetsListOfObjectWithNullObject.json") - val fieldKey = "allPlanets({\"first\":300})" + val fieldKey = "allPlanets({\"first\":300})".hashed() - var record: Record? = records["$fieldKey.planets.0"] + var record: Record? = records["$fieldKey.planets.0".hashed()] assertTrue(record?.get("filmConnection") == null) - record = records.get("$fieldKey.planets.0.filmConnection") + record = records.get("${"$fieldKey.planets.0".hashed()}.filmConnection".hashed()) as Record? 
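Only record keys are hashed by this patch; the field keys inside each record ("name", "filmConnection", argument-carrying keys like height({"unit":"METER"}), and so on) keep their plain form, which is why only the record lookups change in these assertions. A sketch of that split using plain Kotlin maps, with illustrative values taken from the fixtures above:

@OptIn(ExperimentalStdlibApi::class)
fun String.hashedSketch(): String = hashCode().toHexString()

fun main() {
  // Record key: hashed. Field keys inside the record: unchanged.
  val records: Map<String, Map<String, Any?>> = mapOf(
      "Character:1000".hashedSketch() to mapOf(
          "name" to "Luke Skywalker",
          "height({\"unit\":\"METER\"})" to 1.72,
      )
  )
  println(records.keys.single())
  println(records.values.single().keys)
}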
assertTrue(record == null) - record = records.get("$fieldKey.planets.1.filmConnection") + record = records.get("${"$fieldKey.planets.1".hashed()}.filmConnection".hashed()) assertTrue(record != null) } @@ -240,7 +241,7 @@ class NormalizerTest { fun testHeroParentTypeDependentFieldHuman() { val records = records(HeroParentTypeDependentFieldQuery(Episode.EMPIRE), "HeroParentTypeDependentFieldHumanResponse.json") - val lukeRecord = records.get("$TEST_FIELD_KEY_EMPIRE.friends.0") + val lukeRecord = records.get("${TEST_FIELD_KEY_EMPIRE.hashed()}.friends.0".hashed()) assertEquals(lukeRecord!!["name"], "Han Solo") assertEquals(lukeRecord["height({\"unit\":\"FOOT\"})"], 5.905512) } diff --git a/tests/normalized-cache/src/commonTest/kotlin/OptimisticCacheTest.kt b/tests/normalized-cache/src/commonTest/kotlin/OptimisticCacheTest.kt index 5abe1a68..3323fa68 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/OptimisticCacheTest.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/OptimisticCacheTest.kt @@ -9,6 +9,7 @@ import com.apollographql.cache.normalized.FetchPolicy import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.IdCacheKeyGenerator import com.apollographql.cache.normalized.fetchPolicy +import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.optimisticUpdates import com.apollographql.cache.normalized.refetchPolicy @@ -122,7 +123,7 @@ class OptimisticCacheTest { store.writeOptimisticUpdates( HeroAndFriendsNamesFragmentImpl(), mutationId = mutationId, - cacheKey = CacheKey("""hero({"episode":"JEDI"})"""), + cacheKey = CacheKey("""hero({"episode":"JEDI"})""".hashed()), data = data, ).also { store.publish(it) diff --git a/tests/normalized-cache/src/commonTest/kotlin/OtherCacheTest.kt b/tests/normalized-cache/src/commonTest/kotlin/OtherCacheTest.kt index bbc92fc2..f3d9072d 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/OtherCacheTest.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/OtherCacheTest.kt @@ -9,6 +9,7 @@ import com.apollographql.cache.normalized.FetchPolicy import com.apollographql.cache.normalized.api.IdCacheKeyGenerator import com.apollographql.cache.normalized.api.IdCacheKeyResolver import com.apollographql.cache.normalized.fetchPolicy +import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.store import com.apollographql.mockserver.MockServer @@ -72,7 +73,7 @@ class OtherCacheTest { // Some details are not present in the master query, we should get a cache miss val e = apolloClient.query(CharacterDetailsQuery("1002")).fetchPolicy(FetchPolicy.CacheOnly).execute().exception as CacheMissException - assertTrue(e.message!!.contains("Object 'Character:1002' has no field named '__typename'")) + assertTrue(e.message!!.contains("Object '${"Character:1002".hashed()}' has no field named '__typename'")) } diff --git a/tests/normalized-cache/src/commonTest/kotlin/StoreTest.kt b/tests/normalized-cache/src/commonTest/kotlin/StoreTest.kt index 5ac9b9d9..1236045a 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/StoreTest.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/StoreTest.kt @@ -11,12 +11,14 @@ import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.IdCacheKeyGenerator import com.apollographql.cache.normalized.api.IdCacheKeyResolver import 
com.apollographql.cache.normalized.fetchPolicy +import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.isFromCache import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.store import normalizer.CharacterNameByIdQuery import normalizer.HeroAndFriendsNamesWithIDsQuery import normalizer.type.Episode +import kotlin.test.Ignore import kotlin.test.Test import kotlin.test.assertEquals import kotlin.test.assertIs @@ -41,7 +43,7 @@ class StoreTest { assertFriendIsCached("1002", "Han Solo") // remove the root query object - var removed = store.remove(CacheKey("Character:2001")) + var removed = store.remove(CacheKey("Character:2001".hashed())) assertEquals(true, removed) // Trying to get the full response should fail @@ -52,7 +54,7 @@ class StoreTest { assertFriendIsCached("1002", "Han Solo") // remove a single object from the list - removed = store.remove(CacheKey("Character:1002")) + removed = store.remove(CacheKey("Character:1002".hashed())) assertEquals(true, removed) // Trying to get the full response should fail @@ -74,7 +76,7 @@ class StoreTest { assertFriendIsCached("1003", "Leia Organa") // Now remove multiple keys - val removed = store.remove(listOf(CacheKey("Character:1002"), CacheKey("Character:1000"))) + val removed = store.remove(listOf(CacheKey("Character:1002".hashed()), CacheKey("Character:1000".hashed()))) assertEquals(2, removed) @@ -95,7 +97,7 @@ class StoreTest { assertFriendIsCached("1003", "Leia Organa") // test remove root query object - val removed = store.remove(CacheKey("Character:2001"), true) + val removed = store.remove(CacheKey("Character:2001".hashed()), true) assertEquals(true, removed) // Nothing should be cached anymore @@ -105,6 +107,7 @@ class StoreTest { assertFriendIsNotCached("1003") } + @Ignore @Test @Throws(Exception::class) fun directAccess() = runTest(before = { setUp() }) { diff --git a/tests/normalized-cache/src/commonTest/kotlin/fragmentnormalizer/FragmentNormalizerTest.kt b/tests/normalized-cache/src/commonTest/kotlin/fragmentnormalizer/FragmentNormalizerTest.kt index 3f2729f3..d464fa15 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/fragmentnormalizer/FragmentNormalizerTest.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/fragmentnormalizer/FragmentNormalizerTest.kt @@ -6,6 +6,7 @@ import com.apollographql.apollo.testing.internal.runTest import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.IdCacheKeyGenerator import com.apollographql.cache.normalized.apolloStore +import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.internal.normalized import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.normalizedCache @@ -99,6 +100,6 @@ class FragmentNormalizerTest { cacheKeyGenerator = IdCacheKeyGenerator(), ) - assertContains(records.keys, "1.author") + assertContains(records.keys, "1.author".hashed()) } } diff --git a/tests/normalized-cache/src/jvmTest/kotlin/CacheMissLoggingInterceptorTest.kt b/tests/normalized-cache/src/jvmTest/kotlin/CacheMissLoggingInterceptorTest.kt index 403db40c..8adfcfe5 100644 --- a/tests/normalized-cache/src/jvmTest/kotlin/CacheMissLoggingInterceptorTest.kt +++ b/tests/normalized-cache/src/jvmTest/kotlin/CacheMissLoggingInterceptorTest.kt @@ -4,6 +4,7 @@ import com.apollographql.apollo.ApolloClient import com.apollographql.apollo.testing.internal.runTest import 
com.apollographql.cache.normalized.FetchPolicy import com.apollographql.cache.normalized.fetchPolicy +import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.logCacheMisses import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.normalizedCache @@ -54,7 +55,7 @@ class CacheMissLoggingInterceptorTest { assertEquals( listOf( "Object 'QUERY_ROOT' has no field named 'hero'", - "Object 'hero' has no field named 'appearsIn'" + "Object '${"hero".hashed()}' has no field named 'appearsIn'" ), recordedLogs ) diff --git a/tests/pagination/src/commonTest/kotlin/ConnectionPaginationTest.kt b/tests/pagination/src/commonTest/kotlin/ConnectionPaginationTest.kt index deca3d04..1c43da4e 100644 --- a/tests/pagination/src/commonTest/kotlin/ConnectionPaginationTest.kt +++ b/tests/pagination/src/commonTest/kotlin/ConnectionPaginationTest.kt @@ -72,7 +72,6 @@ class ConnectionPaginationTest { apolloStore.writeOperation(query1, data1) var dataFromStore = apolloStore.readOperation(query1).data assertEquals(data1, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Page after val query2 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx43")) @@ -135,7 +134,6 @@ class ConnectionPaginationTest { } } assertEquals(expectedData, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Page after val query3 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx45")) @@ -210,7 +208,6 @@ class ConnectionPaginationTest { } } assertEquals(expectedData, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Page before val query4 = UsersQuery(last = Optional.Present(2), before = Optional.Present("xx42")) @@ -297,7 +294,6 @@ class ConnectionPaginationTest { } } assertEquals(expectedData, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Non-contiguous page (should reset) val query5 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx50")) @@ -326,7 +322,6 @@ class ConnectionPaginationTest { apolloStore.writeOperation(query5, data5) dataFromStore = apolloStore.readOperation(query1).data assertEquals(data5, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Empty page (should keep previous result) val query6 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx51")) @@ -342,7 +337,6 @@ class ConnectionPaginationTest { apolloStore.writeOperation(query6, data6) dataFromStore = apolloStore.readOperation(query1).data assertEquals(data5, dataFromStore) - assertChainedCachesAreEqual(apolloStore) } } diff --git a/tests/pagination/src/commonTest/kotlin/ConnectionProgrammaticPaginationTest.kt b/tests/pagination/src/commonTest/kotlin/ConnectionProgrammaticPaginationTest.kt index f20ea377..debdf44b 100644 --- a/tests/pagination/src/commonTest/kotlin/ConnectionProgrammaticPaginationTest.kt +++ b/tests/pagination/src/commonTest/kotlin/ConnectionProgrammaticPaginationTest.kt @@ -82,7 +82,6 @@ class ConnectionProgrammaticPaginationTest { apolloStore.writeOperation(query1, data1) var dataFromStore = apolloStore.readOperation(query1).data assertEquals(data1, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Page after val query2 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx43")) @@ -145,7 +144,6 @@ class ConnectionProgrammaticPaginationTest { } } assertEquals(expectedData, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Page after val query3 = UsersQuery(first = Optional.Present(2), after = 
Optional.Present("xx45")) @@ -220,7 +218,6 @@ class ConnectionProgrammaticPaginationTest { } } assertEquals(expectedData, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Page before val query4 = UsersQuery(last = Optional.Present(2), before = Optional.Present("xx42")) @@ -307,7 +304,6 @@ class ConnectionProgrammaticPaginationTest { } } assertEquals(expectedData, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Non-contiguous page (should reset) val query5 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx50")) @@ -336,7 +332,6 @@ class ConnectionProgrammaticPaginationTest { apolloStore.writeOperation(query5, data5) dataFromStore = apolloStore.readOperation(query1).data assertEquals(data5, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Empty page (should keep previous result) val query6 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx51")) @@ -352,7 +347,6 @@ class ConnectionProgrammaticPaginationTest { apolloStore.writeOperation(query6, data6) dataFromStore = apolloStore.readOperation(query1).data assertEquals(data5, dataFromStore) - assertChainedCachesAreEqual(apolloStore) } } diff --git a/tests/pagination/src/commonTest/kotlin/ConnectionWithNodesPaginationTest.kt b/tests/pagination/src/commonTest/kotlin/ConnectionWithNodesPaginationTest.kt index 50130b82..6555b282 100644 --- a/tests/pagination/src/commonTest/kotlin/ConnectionWithNodesPaginationTest.kt +++ b/tests/pagination/src/commonTest/kotlin/ConnectionWithNodesPaginationTest.kt @@ -65,7 +65,6 @@ class ConnectionWithNodesPaginationTest { apolloStore.writeOperation(query1, data1) var dataFromStore = apolloStore.readOperation(query1).data assertEquals(data1, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Page after val query2 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx43")) @@ -110,7 +109,6 @@ class ConnectionWithNodesPaginationTest { } } assertEquals(expectedData, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Page after val query3 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx45")) @@ -161,7 +159,6 @@ class ConnectionWithNodesPaginationTest { } } assertEquals(expectedData, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Page before val query4 = UsersQuery(last = Optional.Present(2), before = Optional.Present("xx42")) @@ -218,7 +215,6 @@ class ConnectionWithNodesPaginationTest { } } assertEquals(expectedData, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Non-contiguous page (should reset) val query5 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx50")) @@ -241,7 +237,6 @@ class ConnectionWithNodesPaginationTest { apolloStore.writeOperation(query5, data5) dataFromStore = apolloStore.readOperation(query1).data assertEquals(data5, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Empty page (should keep previous result) val query6 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx51")) @@ -258,7 +253,6 @@ class ConnectionWithNodesPaginationTest { apolloStore.writeOperation(query6, data6) dataFromStore = apolloStore.readOperation(query1).data assertEquals(data5, dataFromStore) - assertChainedCachesAreEqual(apolloStore) } } diff --git a/tests/pagination/src/commonTest/kotlin/CursorBasedPaginationTest.kt b/tests/pagination/src/commonTest/kotlin/CursorBasedPaginationTest.kt index 1449e92a..1ced1369 100644 --- a/tests/pagination/src/commonTest/kotlin/CursorBasedPaginationTest.kt +++ 
b/tests/pagination/src/commonTest/kotlin/CursorBasedPaginationTest.kt @@ -79,7 +79,6 @@ class CursorBasedPaginationTest { apolloStore.writeOperation(query1, data1) var dataFromStore = apolloStore.readOperation(query1).data assertEquals(data1, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Page after val query2 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx43")) @@ -142,7 +141,6 @@ class CursorBasedPaginationTest { } } assertEquals(expectedData, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Page after val query3 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx45")) @@ -217,7 +215,6 @@ class CursorBasedPaginationTest { } } assertEquals(expectedData, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Page before val query4 = UsersQuery(last = Optional.Present(2), before = Optional.Present("xx42")) @@ -304,7 +301,6 @@ class CursorBasedPaginationTest { } } assertEquals(expectedData, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Non-contiguous page (should reset) val query5 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx50")) @@ -333,7 +329,6 @@ class CursorBasedPaginationTest { apolloStore.writeOperation(query5, data5) dataFromStore = apolloStore.readOperation(query1).data assertEquals(data5, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Empty page (should keep previous result) val query6 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx51")) @@ -349,7 +344,6 @@ class CursorBasedPaginationTest { apolloStore.writeOperation(query6, data6) dataFromStore = apolloStore.readOperation(query1).data assertEquals(data5, dataFromStore) - assertChainedCachesAreEqual(apolloStore) } @Test diff --git a/tests/pagination/src/commonTest/kotlin/OffsetBasedWithArrayPaginationTest.kt b/tests/pagination/src/commonTest/kotlin/OffsetBasedWithArrayPaginationTest.kt index 69925d93..1be429b5 100644 --- a/tests/pagination/src/commonTest/kotlin/OffsetBasedWithArrayPaginationTest.kt +++ b/tests/pagination/src/commonTest/kotlin/OffsetBasedWithArrayPaginationTest.kt @@ -56,7 +56,6 @@ class OffsetBasedWithArrayPaginationTest { apolloStore.writeOperation(query1, data1) var dataFromStore = apolloStore.readOperation(query1).data assertEquals(data1, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Page after val query2 = UsersQuery(offset = Optional.Present(44), limit = Optional.Present(2)) @@ -77,7 +76,6 @@ class OffsetBasedWithArrayPaginationTest { ) } assertEquals(expectedData, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Page in the middle val query3 = UsersQuery(offset = Optional.Present(44), limit = Optional.Present(3)) @@ -100,7 +98,6 @@ class OffsetBasedWithArrayPaginationTest { ) } assertEquals(expectedData, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Page before val query4 = UsersQuery(offset = Optional.Present(40), limit = Optional.Present(2)) @@ -124,7 +121,6 @@ class OffsetBasedWithArrayPaginationTest { ) } assertEquals(expectedData, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Non-contiguous page (should reset) val query5 = UsersQuery(offset = Optional.Present(50), limit = Optional.Present(2)) @@ -137,7 +133,6 @@ class OffsetBasedWithArrayPaginationTest { apolloStore.writeOperation(query5, data5) dataFromStore = apolloStore.readOperation(query1).data assertEquals(data5, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Empty page (should keep previous result) val query6 = UsersQuery(offset 
= Optional.Present(52), limit = Optional.Present(2)) @@ -147,7 +142,6 @@ class OffsetBasedWithArrayPaginationTest { apolloStore.writeOperation(query6, data6) dataFromStore = apolloStore.readOperation(query1).data assertEquals(data5, dataFromStore) - assertChainedCachesAreEqual(apolloStore) } private class OffsetPaginationMetadataGenerator(private val fieldName: String) : MetadataGenerator { diff --git a/tests/pagination/src/commonTest/kotlin/OffsetBasedWithPageAndInputPaginationTest.kt b/tests/pagination/src/commonTest/kotlin/OffsetBasedWithPageAndInputPaginationTest.kt index 07b0bd70..907f2584 100644 --- a/tests/pagination/src/commonTest/kotlin/OffsetBasedWithPageAndInputPaginationTest.kt +++ b/tests/pagination/src/commonTest/kotlin/OffsetBasedWithPageAndInputPaginationTest.kt @@ -66,7 +66,6 @@ class OffsetBasedWithPageAndInputPaginationTest { apolloStore.writeOperation(query1, data1) var dataFromStore = apolloStore.readOperation(query1).data assertEquals(data1, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Page after val query2 = UsersQuery(offset = Optional.Present(44), limit = Optional.Present(2)) @@ -91,7 +90,6 @@ class OffsetBasedWithPageAndInputPaginationTest { } } assertEquals(expectedData, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Page in the middle val query3 = UsersQuery(offset = Optional.Present(44), limit = Optional.Present(3)) @@ -118,7 +116,6 @@ class OffsetBasedWithPageAndInputPaginationTest { } } assertEquals(expectedData, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Page before val query4 = UsersQuery(offset = Optional.Present(40), limit = Optional.Present(2)) @@ -146,7 +143,6 @@ class OffsetBasedWithPageAndInputPaginationTest { } } assertEquals(expectedData, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Non-contiguous page (should reset) val query5 = UsersQuery(offset = Optional.Present(50), limit = Optional.Present(2)) @@ -161,7 +157,6 @@ class OffsetBasedWithPageAndInputPaginationTest { apolloStore.writeOperation(query5, data5) dataFromStore = apolloStore.readOperation(query1).data assertEquals(data5, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Empty page (should keep previous result) val query6 = UsersQuery(offset = Optional.Present(52), limit = Optional.Present(2)) @@ -173,7 +168,6 @@ class OffsetBasedWithPageAndInputPaginationTest { apolloStore.writeOperation(query6, data6) dataFromStore = apolloStore.readOperation(query1).data assertEquals(data5, dataFromStore) - assertChainedCachesAreEqual(apolloStore) } private class OffsetPaginationMetadataGenerator(private val typeName: String) : MetadataGenerator { diff --git a/tests/pagination/src/commonTest/kotlin/OffsetBasedWithPagePaginationTest.kt b/tests/pagination/src/commonTest/kotlin/OffsetBasedWithPagePaginationTest.kt index e6e4ddec..8b0100b5 100644 --- a/tests/pagination/src/commonTest/kotlin/OffsetBasedWithPagePaginationTest.kt +++ b/tests/pagination/src/commonTest/kotlin/OffsetBasedWithPagePaginationTest.kt @@ -9,7 +9,6 @@ import com.apollographql.cache.normalized.api.FieldRecordMerger import com.apollographql.cache.normalized.api.MetadataGenerator import com.apollographql.cache.normalized.api.MetadataGeneratorContext import com.apollographql.cache.normalized.api.NormalizedCacheFactory -import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.api.TypePolicyCacheKeyGenerator import com.apollographql.cache.normalized.memory.MemoryCacheFactory import 
com.apollographql.cache.normalized.sql.SqlNormalizedCacheFactory @@ -60,7 +59,6 @@ class OffsetBasedWithPagePaginationTest { apolloStore.writeOperation(query1, data1) var dataFromStore = apolloStore.readOperation(query1).data assertEquals(data1, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Page after val query2 = UsersQuery(offset = Optional.Present(44), limit = Optional.Present(2)) @@ -85,7 +83,6 @@ class OffsetBasedWithPagePaginationTest { } } assertEquals(expectedData, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Page in the middle val query3 = UsersQuery(offset = Optional.Present(44), limit = Optional.Present(3)) @@ -112,7 +109,6 @@ class OffsetBasedWithPagePaginationTest { } } assertEquals(expectedData, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Page before val query4 = UsersQuery(offset = Optional.Present(40), limit = Optional.Present(2)) @@ -140,7 +136,6 @@ class OffsetBasedWithPagePaginationTest { } } assertEquals(expectedData, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Non-contiguous page (should reset) val query5 = UsersQuery(offset = Optional.Present(50), limit = Optional.Present(2)) @@ -155,7 +150,6 @@ class OffsetBasedWithPagePaginationTest { apolloStore.writeOperation(query5, data5) dataFromStore = apolloStore.readOperation(query1).data assertEquals(data5, dataFromStore) - assertChainedCachesAreEqual(apolloStore) // Empty page (should keep previous result) val query6 = UsersQuery(offset = Optional.Present(52), limit = Optional.Present(2)) @@ -167,7 +161,6 @@ class OffsetBasedWithPagePaginationTest { apolloStore.writeOperation(query6, data6) dataFromStore = apolloStore.readOperation(query1).data assertEquals(data5, dataFromStore) - assertChainedCachesAreEqual(apolloStore) } private class OffsetPaginationMetadataGenerator(private val typeName: String) : MetadataGenerator { @@ -225,21 +218,3 @@ class OffsetBasedWithPagePaginationTest { } } } - -internal fun assertChainedCachesAreEqual(apolloStore: ApolloStore) { - val dump = apolloStore.dump().filterKeys { - // Ignore optimistic cache for comparison - it.simpleName != "OptimisticNormalizedCache" - } - if (dump.size < 2) return - val caches = dump.values.toList() - val cache1: Map = caches[0] - val cache2: Map = caches[1] - for (key in cache1.keys) { - val record1 = cache1[key]!! - val record2 = cache2[key]!! 
- assertEquals(record1.key, record2.key) - assertEquals(record1.fields, record2.fields) - assertEquals(record1.metadata, record2.metadata) - } -} diff --git a/tests/partial-results/src/commonTest/kotlin/test/CachePartialResultTest.kt b/tests/partial-results/src/commonTest/kotlin/test/CachePartialResultTest.kt index 2717146b..2b48c423 100644 --- a/tests/partial-results/src/commonTest/kotlin/test/CachePartialResultTest.kt +++ b/tests/partial-results/src/commonTest/kotlin/test/CachePartialResultTest.kt @@ -23,6 +23,7 @@ import com.apollographql.cache.normalized.apolloStore import com.apollographql.cache.normalized.fetchFromCache import com.apollographql.cache.normalized.fetchPolicy import com.apollographql.cache.normalized.fetchPolicyInterceptor +import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.normalizedCache import com.apollographql.cache.normalized.store @@ -124,7 +125,8 @@ class CachePartialResultTest { ) assertErrorsEquals( listOf( - Error.Builder("Object 'User:1' has no field named 'nickName' in the cache").path(listOf("me", "nickName")).build() + Error.Builder("Object '${"User:1".hashed()}' has no field named 'nickName' in the cache").path(listOf("me", "nickName")) + .build() ), cacheMissResult.errors ) @@ -306,7 +308,7 @@ class CachePartialResultTest { ) // Remove project lead from the cache - apolloClient.apolloStore.remove(CacheKey("User", "3")) + apolloClient.apolloStore.remove(CacheKey("User:3".hashed())) val cacheResult = apolloClient.query(MeWithBestFriendQuery()) .fetchPolicyInterceptor(PartialCacheOnlyInterceptor) .execute() @@ -342,13 +344,13 @@ class CachePartialResultTest { ) assertErrorsEquals( listOf( - Error.Builder("Object 'User:3' not found in the cache").path(listOf("me", "projects", 0, "lead")).build() + Error.Builder("Object '${"User:3".hashed()}' not found in the cache").path(listOf("me", "projects", 0, "lead")).build() ), cacheResult.errors ) // Remove best friend from the cache - apolloClient.apolloStore.remove(CacheKey("User", "2")) + apolloClient.apolloStore.remove(CacheKey("User:2".hashed())) val cacheResult2 = apolloClient.query(MeWithBestFriendQuery()) .fetchPolicyInterceptor(PartialCacheOnlyInterceptor) .execute() @@ -379,14 +381,14 @@ class CachePartialResultTest { ) assertErrorsEquals( listOf( - Error.Builder("Object 'User:2' not found in the cache").path(listOf("me", "bestFriend")).build(), - Error.Builder("Object 'User:3' not found in the cache").path(listOf("me", "projects", 0, "lead")).build(), + Error.Builder("Object '${"User:2".hashed()}' not found in the cache").path(listOf("me", "bestFriend")).build(), + Error.Builder("Object '${"User:3".hashed()}' not found in the cache").path(listOf("me", "projects", 0, "lead")).build(), ), cacheResult2.errors ) // Remove project user from the cache - apolloClient.apolloStore.remove(CacheKey("User", "4")) + apolloClient.apolloStore.remove(CacheKey("User:4".hashed())) val cacheResult3 = apolloClient.query(MeWithBestFriendQuery()) .fetchPolicyInterceptor(PartialCacheOnlyInterceptor) .execute() @@ -394,9 +396,10 @@ class CachePartialResultTest { assertNull(cacheResult3.data) assertErrorsEquals( listOf( - Error.Builder("Object 'User:2' not found in the cache").path(listOf("me", "bestFriend")).build(), - Error.Builder("Object 'User:3' not found in the cache").path(listOf("me", "projects", 0, "lead")).build(), - Error.Builder("Object 'User:4' not found in the cache").path(listOf("me", "projects", 0, 
"users", 0)).build() + Error.Builder("Object '${"User:2".hashed()}' not found in the cache").path(listOf("me", "bestFriend")).build(), + Error.Builder("Object '${"User:3".hashed()}' not found in the cache").path(listOf("me", "projects", 0, "lead")).build(), + Error.Builder("Object '${"User:4".hashed()}' not found in the cache").path(listOf("me", "projects", 0, "users", 0)) + .build() ), cacheResult3.errors ) @@ -529,8 +532,8 @@ class CachePartialResultTest { // Remove the category from the cache apolloClient.apolloStore.accessCache { cache -> - val record = cache.loadRecord("User:1", CacheHeaders.NONE)!! - cache.remove(CacheKey("User", "1"), false) + val record = cache.loadRecord("User:1".hashed(), CacheHeaders.NONE)!! + cache.remove(CacheKey("User:1".hashed()), false) cache.merge(Record(record.key, record.fields - "category"), CacheHeaders.NONE, DefaultRecordMerger) } val cacheMissResult = apolloClient.query(UserByCategoryQuery(Category(2, "Second"))) @@ -540,7 +543,8 @@ class CachePartialResultTest { assertNull(cacheMissResult.data) assertErrorsEquals( listOf( - Error.Builder("Object 'User:1' has no field named 'category' in the cache").path(listOf("user", "category")).build() + Error.Builder("Object '${"User:1".hashed()}' has no field named 'category' in the cache").path(listOf("user", "category")) + .build() ), cacheMissResult.errors ) @@ -630,7 +634,7 @@ class CachePartialResultTest { ) // Remove lead from the cache - apolloClient.apolloStore.remove(CacheKey("User", "2")) + apolloClient.apolloStore.remove(CacheKey("User:2".hashed())) val cacheMissResult = apolloClient.query(WithFragmentsQuery()) .fetchPolicyInterceptor(PartialCacheOnlyInterceptor) @@ -667,7 +671,7 @@ class CachePartialResultTest { ) assertErrorsEquals( listOf( - Error.Builder("Object 'User:2' not found in the cache").path(listOf("me", "mainProject", "lead0")).build() + Error.Builder("Object '${"User:2".hashed()}' not found in the cache").path(listOf("me", "mainProject", "lead0")).build() ), cacheMissResult.errors ) @@ -731,7 +735,8 @@ class CachePartialResultTest { ) assertErrorsEquals( listOf( - Error.Builder("Field 'nickName' on object 'User:1' is stale in the cache").path(listOf("me", "nickName")).build() + Error.Builder("Field 'nickName' on object '${"User:1".hashed()}' is stale in the cache").path(listOf("me", "nickName")) + .build() ), cacheMissResult.errors ) @@ -795,7 +800,7 @@ class CachePartialResultTest { ) assertErrorsEquals( listOf( - Error.Builder("Field 'salary' on object 'User:1.employeeInfo' is stale in the cache") + Error.Builder("Field 'salary' on object '${"${"User:1".hashed()}.employeeInfo".hashed()}' is stale in the cache") .path(listOf("me", "employeeInfo", "salary")).build() ), cacheMissResult.errors diff --git a/tests/store-errors/src/commonTest/kotlin/test/StoreErrorsTest.kt b/tests/store-errors/src/commonTest/kotlin/test/StoreErrorsTest.kt index 5b1e85fb..3325e906 100644 --- a/tests/store-errors/src/commonTest/kotlin/test/StoreErrorsTest.kt +++ b/tests/store-errors/src/commonTest/kotlin/test/StoreErrorsTest.kt @@ -18,6 +18,7 @@ import com.apollographql.cache.normalized.errorsReplaceCachedValues import com.apollographql.cache.normalized.fetchFromCache import com.apollographql.cache.normalized.fetchPolicy import com.apollographql.cache.normalized.fetchPolicyInterceptor +import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.sql.SqlNormalizedCacheFactory import 
com.apollographql.cache.normalized.store @@ -711,12 +712,12 @@ class StoreErrorsTest { dataWithErrors = dataWithErrors, customScalarAdapters = CustomScalarAdapters.Empty, ) - assertEquals("User", normalized["User:1"]!!["__typename"]) - assertEquals("1", normalized["User:1"]!!["id"]) - assertEquals("John", normalized["User:1"]!!["firstName"]) - assertEquals("Smith", normalized["User:1"]!!["lastName"]) + assertEquals("User", normalized["User:1".hashed()]!!["__typename"]) + assertEquals("1", normalized["User:1".hashed()]!!["id"]) + assertEquals("John", normalized["User:1".hashed()]!!["firstName"]) + assertEquals("Smith", normalized["User:1".hashed()]!!["lastName"]) assertErrorsEquals(Error.Builder("'nickName' can't be reached").path(listOf("me", "nickName")) - .build(), normalized["User:1"]!!["nickName"] as Error + .build(), normalized["User:1".hashed()]!!["nickName"] as Error ) } From 88bdae157db431a03ab24ca2c2e294aec2c14b48 Mon Sep 17 00:00:00 2001 From: BoD Date: Wed, 12 Mar 2025 15:00:10 +0100 Subject: [PATCH 02/29] Use CacheKey in more places --- .../api/normalized-cache-incubating.api | 68 +++++------ .../api/normalized-cache-incubating.klib.api | 76 ++++++------- .../cache/normalized/ApolloStore.kt | 16 +-- .../cache/normalized/GarbageCollection.kt | 47 ++++---- .../cache/normalized/api/CacheKey.kt | 35 +++--- .../cache/normalized/api/CacheResolver.kt | 8 +- .../cache/normalized/api/NormalizedCache.kt | 18 +-- .../normalized/api/ReadOnlyNormalizedCache.kt | 6 +- .../cache/normalized/api/Record.kt | 10 +- .../cache/normalized/api/RecordMerger.kt | 6 +- .../normalized/internal/CacheBatchReader.kt | 14 +-- .../normalized/internal/DefaultApolloStore.kt | 8 +- .../cache/normalized/internal/Normalizer.kt | 20 ++-- .../internal/OptimisticNormalizedCache.kt | 29 ++--- .../normalized/internal/RecordWeigher.kt | 2 +- .../cache/normalized/memory/MemoryCache.kt | 36 ++---- .../cache/normalized/CacheKeyResolverTest.kt | 2 +- .../normalized/DefaultRecordMergerTest.kt | 7 +- .../cache/normalized/MemoryCacheTest.kt | 24 ++-- .../cache/normalized/RecordWeigherTest.kt | 2 +- .../normalized-cache-sqlite-incubating.api | 5 +- .../normalized-cache-sqlite-incubating.api | 5 +- ...ormalized-cache-sqlite-incubating.klib.api | 7 +- .../normalized/sql/SqlNormalizedCache.kt | 25 ++-- .../sql/internal/Blob2RecordDatabase.kt | 4 +- .../sql/internal/BlobRecordDatabase.kt | 4 +- .../sql/internal/BlobRecordSerializer.kt | 2 +- .../normalized/sql/SqlNormalizedCacheTest.kt | 45 ++------ .../cache/normalized/sql/TrimTest.kt | 13 ++- .../kotlin/test/DeferNormalizedCacheTest.kt | 8 +- .../kotlin/DanglingReferencesTest.kt | 18 +-- .../commonTest/kotlin/GarbageCollectTest.kt | 18 +-- .../kotlin/ReachableCacheKeysTest.kt | 107 +++++++++--------- .../src/commonTest/kotlin/StaleFieldsTest.kt | 74 ++++++------ .../src/commonTest/kotlin/IncludeTest.kt | 6 +- .../src/commonTest/kotlin/MigrationTest.kt | 2 +- .../src/commonTest/kotlin/test/StoreTest.kt | 13 +-- .../src/commonTest/kotlin/test/StoreTest.kt | 13 +-- .../src/commonTest/kotlin/test/StoreTest.kt | 13 +-- .../src/commonTest/kotlin/MemoryCacheTest.kt | 3 +- .../src/commonTest/kotlin/NormalizerTest.kt | 104 +++++++++-------- .../commonTest/kotlin/OptimisticCacheTest.kt | 3 +- .../src/commonTest/kotlin/OtherCacheTest.kt | 4 +- .../src/commonTest/kotlin/StoreTest.kt | 25 +--- .../src/commonTest/kotlin/ThreadTests.kt | 13 +-- .../FragmentNormalizerTest.kt | 3 +- .../kotlin/MemoryCacheOnlyTest.kt | 3 +- .../kotlin/CacheMissLoggingInterceptorTest.kt | 4 +- 
.../kotlin/WriteToCacheAsynchronouslyTest.kt | 3 +- .../kotlin/test/CachePartialResultTest.kt | 42 ++++--- .../commonTest/kotlin/test/StoreErrorsTest.kt | 14 +-- 51 files changed, 476 insertions(+), 561 deletions(-) diff --git a/normalized-cache-incubating/api/normalized-cache-incubating.api b/normalized-cache-incubating/api/normalized-cache-incubating.api index dff0c266..13c30061 100644 --- a/normalized-cache-incubating/api/normalized-cache-incubating.api +++ b/normalized-cache-incubating/api/normalized-cache-incubating.api @@ -7,16 +7,16 @@ public abstract interface class com/apollographql/cache/normalized/ApolloStore { public abstract fun getChangedKeys ()Lkotlinx/coroutines/flow/SharedFlow; public abstract fun normalize (Lcom/apollographql/apollo/api/Executable;Ljava/util/Map;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;)Ljava/util/Map; public abstract fun publish (Ljava/util/Set;Lkotlin/coroutines/Continuation;)Ljava/lang/Object; - public abstract fun readFragment (Lcom/apollographql/apollo/api/Fragment;Lcom/apollographql/cache/normalized/api/CacheKey;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/ApolloStore$ReadResult; + public abstract fun readFragment-dEpVOtE (Lcom/apollographql/apollo/api/Fragment;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/ApolloStore$ReadResult; public abstract fun readOperation (Lcom/apollographql/apollo/api/Operation;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/apollo/api/ApolloResponse; - public abstract fun remove (Lcom/apollographql/cache/normalized/api/CacheKey;Z)Z public abstract fun remove (Ljava/util/List;Z)I + public abstract fun remove-eNSUWrY (Ljava/lang/String;Z)Z public abstract fun rollbackOptimisticUpdates (Ljava/util/UUID;)Ljava/util/Set; - public abstract fun writeFragment (Lcom/apollographql/apollo/api/Fragment;Lcom/apollographql/cache/normalized/api/CacheKey;Lcom/apollographql/apollo/api/Fragment$Data;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Set; + public abstract fun writeFragment-1qdIjGk (Lcom/apollographql/apollo/api/Fragment;Ljava/lang/String;Lcom/apollographql/apollo/api/Fragment$Data;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Set; public abstract fun writeOperation (Lcom/apollographql/apollo/api/Operation;Lcom/apollographql/apollo/api/Operation$Data;Ljava/util/List;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Set; public abstract fun writeOperation (Lcom/apollographql/apollo/api/Operation;Ljava/util/Map;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Set; - public abstract fun writeOptimisticUpdates (Lcom/apollographql/apollo/api/Fragment;Lcom/apollographql/cache/normalized/api/CacheKey;Lcom/apollographql/apollo/api/Fragment$Data;Ljava/util/UUID;Lcom/apollographql/apollo/api/CustomScalarAdapters;)Ljava/util/Set; public abstract fun writeOptimisticUpdates (Lcom/apollographql/apollo/api/Operation;Lcom/apollographql/apollo/api/Operation$Data;Ljava/util/UUID;Lcom/apollographql/apollo/api/CustomScalarAdapters;)Ljava/util/Set; + public abstract fun 
writeOptimisticUpdates-1qdIjGk (Lcom/apollographql/apollo/api/Fragment;Ljava/lang/String;Lcom/apollographql/apollo/api/Fragment$Data;Ljava/util/UUID;Lcom/apollographql/apollo/api/CustomScalarAdapters;)Ljava/util/Set; } public final class com/apollographql/cache/normalized/ApolloStore$Companion { @@ -25,15 +25,15 @@ public final class com/apollographql/cache/normalized/ApolloStore$Companion { public final class com/apollographql/cache/normalized/ApolloStore$DefaultImpls { public static synthetic fun normalize$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Executable;Ljava/util/Map;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;ILjava/lang/Object;)Ljava/util/Map; - public static synthetic fun readFragment$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Fragment;Lcom/apollographql/cache/normalized/api/CacheKey;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Lcom/apollographql/cache/normalized/ApolloStore$ReadResult; + public static synthetic fun readFragment-dEpVOtE$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Fragment;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Lcom/apollographql/cache/normalized/ApolloStore$ReadResult; public static synthetic fun readOperation$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Operation;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Lcom/apollographql/apollo/api/ApolloResponse; - public static synthetic fun remove$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/cache/normalized/api/CacheKey;ZILjava/lang/Object;)Z public static synthetic fun remove$default (Lcom/apollographql/cache/normalized/ApolloStore;Ljava/util/List;ZILjava/lang/Object;)I - public static synthetic fun writeFragment$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Fragment;Lcom/apollographql/cache/normalized/api/CacheKey;Lcom/apollographql/apollo/api/Fragment$Data;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Ljava/util/Set; + public static synthetic fun remove-eNSUWrY$default (Lcom/apollographql/cache/normalized/ApolloStore;Ljava/lang/String;ZILjava/lang/Object;)Z + public static synthetic fun writeFragment-1qdIjGk$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Fragment;Ljava/lang/String;Lcom/apollographql/apollo/api/Fragment$Data;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Ljava/util/Set; public static synthetic fun writeOperation$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Operation;Lcom/apollographql/apollo/api/Operation$Data;Ljava/util/List;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Ljava/util/Set; public static synthetic fun writeOperation$default 
(Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Operation;Ljava/util/Map;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Ljava/util/Set; - public static synthetic fun writeOptimisticUpdates$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Fragment;Lcom/apollographql/cache/normalized/api/CacheKey;Lcom/apollographql/apollo/api/Fragment$Data;Ljava/util/UUID;Lcom/apollographql/apollo/api/CustomScalarAdapters;ILjava/lang/Object;)Ljava/util/Set; public static synthetic fun writeOptimisticUpdates$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Operation;Lcom/apollographql/apollo/api/Operation$Data;Ljava/util/UUID;Lcom/apollographql/apollo/api/CustomScalarAdapters;ILjava/lang/Object;)Ljava/util/Set; + public static synthetic fun writeOptimisticUpdates-1qdIjGk$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Fragment;Ljava/lang/String;Lcom/apollographql/apollo/api/Fragment$Data;Ljava/util/UUID;Lcom/apollographql/apollo/api/CustomScalarAdapters;ILjava/lang/Object;)Ljava/util/Set; } public final class com/apollographql/cache/normalized/ApolloStore$ReadResult { @@ -227,28 +227,34 @@ public final class com/apollographql/cache/normalized/api/CacheHeaders$Companion public final class com/apollographql/cache/normalized/api/CacheKey { public static final field Companion Lcom/apollographql/cache/normalized/api/CacheKey$Companion; - public fun (Ljava/lang/String;Ljava/util/List;)V - public fun (Ljava/lang/String;Z)V - public synthetic fun (Ljava/lang/String;ZILkotlin/jvm/internal/DefaultConstructorMarker;)V - public fun (Ljava/lang/String;[Ljava/lang/String;)V + public static final synthetic fun box-impl (Ljava/lang/String;)Lcom/apollographql/cache/normalized/api/CacheKey; public static final fun canDeserialize (Ljava/lang/String;)Z - public static final fun deserialize (Ljava/lang/String;)Lcom/apollographql/cache/normalized/api/CacheKey; + public static fun constructor-impl (Ljava/lang/String;Ljava/util/List;)Ljava/lang/String; + public static fun constructor-impl (Ljava/lang/String;Z)Ljava/lang/String; + public static fun constructor-impl (Ljava/lang/String;[Ljava/lang/String;)Ljava/lang/String; + public static synthetic fun constructor-impl$default (Ljava/lang/String;ZILkotlin/jvm/internal/DefaultConstructorMarker;)Ljava/lang/String; + public static final fun deserialize-gE2UBb4 (Ljava/lang/String;)Ljava/lang/String; public fun equals (Ljava/lang/Object;)Z + public static fun equals-impl (Ljava/lang/String;Ljava/lang/Object;)Z + public static final fun equals-impl0 (Ljava/lang/String;Ljava/lang/String;)Z public final fun getKey ()Ljava/lang/String; public fun hashCode ()I - public static final fun rootKey ()Lcom/apollographql/cache/normalized/api/CacheKey; - public final fun serialize ()Ljava/lang/String; + public static fun hashCode-impl (Ljava/lang/String;)I + public static final fun rootKey-mqw0cJ0 ()Ljava/lang/String; + public static final fun serialize-impl (Ljava/lang/String;)Ljava/lang/String; public fun toString ()Ljava/lang/String; + public static fun toString-impl (Ljava/lang/String;)Ljava/lang/String; + public final synthetic fun unbox-impl ()Ljava/lang/String; } public final class com/apollographql/cache/normalized/api/CacheKey$Companion { public final fun canDeserialize (Ljava/lang/String;)Z - public final fun deserialize 
(Ljava/lang/String;)Lcom/apollographql/cache/normalized/api/CacheKey; - public final fun rootKey ()Lcom/apollographql/cache/normalized/api/CacheKey; + public final fun deserialize-gE2UBb4 (Ljava/lang/String;)Ljava/lang/String; + public final fun rootKey-mqw0cJ0 ()Ljava/lang/String; } public abstract interface class com/apollographql/cache/normalized/api/CacheKeyGenerator { - public abstract fun cacheKeyForObject (Ljava/util/Map;Lcom/apollographql/cache/normalized/api/CacheKeyGeneratorContext;)Lcom/apollographql/cache/normalized/api/CacheKey; + public abstract fun cacheKeyForObject-z2_y8R0 (Ljava/util/Map;Lcom/apollographql/cache/normalized/api/CacheKeyGeneratorContext;)Ljava/lang/String; } public final class com/apollographql/cache/normalized/api/CacheKeyGeneratorContext { @@ -259,7 +265,7 @@ public final class com/apollographql/cache/normalized/api/CacheKeyGeneratorConte public abstract class com/apollographql/cache/normalized/api/CacheKeyResolver : com/apollographql/cache/normalized/api/CacheResolver { public fun ()V - public abstract fun cacheKeyForField (Lcom/apollographql/cache/normalized/api/ResolverContext;)Lcom/apollographql/cache/normalized/api/CacheKey; + public abstract fun cacheKeyForField-fLoEQYY (Lcom/apollographql/cache/normalized/api/ResolverContext;)Ljava/lang/String; public fun listOfCacheKeysForField (Lcom/apollographql/cache/normalized/api/ResolverContext;)Ljava/util/List; public final fun resolveField (Lcom/apollographql/cache/normalized/api/ResolverContext;)Ljava/lang/Object; } @@ -387,14 +393,14 @@ public final class com/apollographql/cache/normalized/api/IdCacheKeyGenerator : public fun ()V public fun ([Ljava/lang/String;)V public synthetic fun ([Ljava/lang/String;ILkotlin/jvm/internal/DefaultConstructorMarker;)V - public fun cacheKeyForObject (Ljava/util/Map;Lcom/apollographql/cache/normalized/api/CacheKeyGeneratorContext;)Lcom/apollographql/cache/normalized/api/CacheKey; + public fun cacheKeyForObject-z2_y8R0 (Ljava/util/Map;Lcom/apollographql/cache/normalized/api/CacheKeyGeneratorContext;)Ljava/lang/String; } public final class com/apollographql/cache/normalized/api/IdCacheKeyResolver : com/apollographql/cache/normalized/api/CacheKeyResolver { public fun ()V public fun (Ljava/util/List;Ljava/util/List;)V public synthetic fun (Ljava/util/List;Ljava/util/List;ILkotlin/jvm/internal/DefaultConstructorMarker;)V - public fun cacheKeyForField (Lcom/apollographql/cache/normalized/api/ResolverContext;)Lcom/apollographql/cache/normalized/api/CacheKey; + public fun cacheKeyForField-fLoEQYY (Lcom/apollographql/cache/normalized/api/ResolverContext;)Ljava/lang/String; public fun listOfCacheKeysForField (Lcom/apollographql/cache/normalized/api/ResolverContext;)Ljava/util/List; } @@ -447,9 +453,8 @@ public abstract interface class com/apollographql/cache/normalized/api/Normalize public abstract fun merge (Lcom/apollographql/cache/normalized/api/Record;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; public abstract fun merge (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; public static fun prettifyDump (Ljava/util/Map;)Ljava/lang/String; - public abstract fun remove (Lcom/apollographql/cache/normalized/api/CacheKey;Z)Z - public abstract fun remove (Ljava/lang/String;)I public abstract fun remove (Ljava/util/Collection;Z)I + public abstract fun remove-eNSUWrY (Ljava/lang/String;Z)Z } public final class 
com/apollographql/cache/normalized/api/NormalizedCache$Companion { @@ -463,14 +468,14 @@ public abstract class com/apollographql/cache/normalized/api/NormalizedCacheFact public abstract interface class com/apollographql/cache/normalized/api/ReadOnlyNormalizedCache { public abstract fun dump ()Ljava/util/Map; - public abstract fun loadRecord (Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; + public abstract fun loadRecord-eNSUWrY (Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; public abstract fun loadRecords (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Collection; } public final class com/apollographql/cache/normalized/api/Record : java/util/Map, kotlin/jvm/internal/markers/KMappedMarker { public static final field Companion Lcom/apollographql/cache/normalized/api/Record$Companion; - public fun (Ljava/lang/String;Ljava/util/Map;Ljava/util/UUID;Ljava/util/Map;)V public synthetic fun (Ljava/lang/String;Ljava/util/Map;Ljava/util/UUID;Ljava/util/Map;ILkotlin/jvm/internal/DefaultConstructorMarker;)V + public synthetic fun (Ljava/lang/String;Ljava/util/Map;Ljava/util/UUID;Ljava/util/Map;Lkotlin/jvm/internal/DefaultConstructorMarker;)V public fun clear ()V public synthetic fun compute (Ljava/lang/Object;Ljava/util/function/BiFunction;)Ljava/lang/Object; public fun compute (Ljava/lang/String;Ljava/util/function/BiFunction;)Ljava/lang/Object; @@ -487,7 +492,7 @@ public final class com/apollographql/cache/normalized/api/Record : java/util/Map public fun get (Ljava/lang/String;)Ljava/lang/Object; public fun getEntries ()Ljava/util/Set; public final fun getFields ()Ljava/util/Map; - public final fun getKey ()Ljava/lang/String; + public final fun getKey-mqw0cJ0 ()Ljava/lang/String; public fun getKeys ()Ljava/util/Set; public final fun getMetadata ()Ljava/util/Map; public final fun getMutationId ()Ljava/util/UUID; @@ -542,12 +547,12 @@ public final class com/apollographql/cache/normalized/api/RecordMergerKt { } public final class com/apollographql/cache/normalized/api/ResolverContext { - public fun (Lcom/apollographql/apollo/api/CompiledField;Lcom/apollographql/apollo/api/Executable$Variables;Ljava/util/Map;Ljava/lang/String;Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Ljava/util/List;)V + public synthetic fun (Lcom/apollographql/apollo/api/CompiledField;Lcom/apollographql/apollo/api/Executable$Variables;Ljava/util/Map;Ljava/lang/String;Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Ljava/util/List;Lkotlin/jvm/internal/DefaultConstructorMarker;)V public final fun getCacheHeaders ()Lcom/apollographql/cache/normalized/api/CacheHeaders; public final fun getField ()Lcom/apollographql/apollo/api/CompiledField; public final fun getFieldKeyGenerator ()Lcom/apollographql/cache/normalized/api/FieldKeyGenerator; public final fun getParent ()Ljava/util/Map; - public final fun getParentKey ()Ljava/lang/String; + public final fun getParentKey-mqw0cJ0 ()Ljava/lang/String; public final fun getParentType ()Ljava/lang/String; public final fun getPath ()Ljava/util/List; public final fun getVariables ()Lcom/apollographql/apollo/api/Executable$Variables; @@ -560,7 +565,7 @@ public final class com/apollographql/cache/normalized/api/SchemaCoordinatesMaxAg public final class 
com/apollographql/cache/normalized/api/TypePolicyCacheKeyGenerator : com/apollographql/cache/normalized/api/CacheKeyGenerator { public static final field INSTANCE Lcom/apollographql/cache/normalized/api/TypePolicyCacheKeyGenerator; - public fun cacheKeyForObject (Ljava/util/Map;Lcom/apollographql/cache/normalized/api/CacheKeyGeneratorContext;)Lcom/apollographql/cache/normalized/api/CacheKey; + public fun cacheKeyForObject-z2_y8R0 (Ljava/util/Map;Lcom/apollographql/cache/normalized/api/CacheKeyGeneratorContext;)Ljava/lang/String; } public final class com/apollographql/cache/normalized/internal/NormalizerKt { @@ -578,13 +583,12 @@ public final class com/apollographql/cache/normalized/memory/MemoryCache : com/a public fun clearAll ()V public fun dump ()Ljava/util/Map; public final fun getSize ()I - public fun loadRecord (Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; + public fun loadRecord-eNSUWrY (Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; public fun loadRecords (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Collection; public fun merge (Lcom/apollographql/cache/normalized/api/Record;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; public fun merge (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; - public fun remove (Lcom/apollographql/cache/normalized/api/CacheKey;Z)Z - public fun remove (Ljava/lang/String;)I public fun remove (Ljava/util/Collection;Z)I + public fun remove-eNSUWrY (Ljava/lang/String;Z)Z } public final class com/apollographql/cache/normalized/memory/MemoryCacheFactory : com/apollographql/cache/normalized/api/NormalizedCacheFactory { diff --git a/normalized-cache-incubating/api/normalized-cache-incubating.klib.api b/normalized-cache-incubating/api/normalized-cache-incubating.klib.api index 59016e75..519b861e 100644 --- a/normalized-cache-incubating/api/normalized-cache-incubating.klib.api +++ b/normalized-cache-incubating/api/normalized-cache-incubating.klib.api @@ -59,17 +59,16 @@ abstract interface com.apollographql.cache.normalized.api/NormalizedCache : com. 
abstract fun merge(kotlin.collections/Collection, com.apollographql.cache.normalized.api/CacheHeaders, com.apollographql.cache.normalized.api/RecordMerger): kotlin.collections/Set // com.apollographql.cache.normalized.api/NormalizedCache.merge|merge(kotlin.collections.Collection;com.apollographql.cache.normalized.api.CacheHeaders;com.apollographql.cache.normalized.api.RecordMerger){}[0] abstract fun remove(com.apollographql.cache.normalized.api/CacheKey, kotlin/Boolean): kotlin/Boolean // com.apollographql.cache.normalized.api/NormalizedCache.remove|remove(com.apollographql.cache.normalized.api.CacheKey;kotlin.Boolean){}[0] abstract fun remove(kotlin.collections/Collection, kotlin/Boolean): kotlin/Int // com.apollographql.cache.normalized.api/NormalizedCache.remove|remove(kotlin.collections.Collection;kotlin.Boolean){}[0] - abstract fun remove(kotlin/String): kotlin/Int // com.apollographql.cache.normalized.api/NormalizedCache.remove|remove(kotlin.String){}[0] final object Companion { // com.apollographql.cache.normalized.api/NormalizedCache.Companion|null[0] - final fun prettifyDump(kotlin.collections/Map, kotlin.collections/Map>): kotlin/String // com.apollographql.cache.normalized.api/NormalizedCache.Companion.prettifyDump|prettifyDump(kotlin.collections.Map,kotlin.collections.Map>){}[0] + final fun prettifyDump(kotlin.collections/Map, kotlin.collections/Map>): kotlin/String // com.apollographql.cache.normalized.api/NormalizedCache.Companion.prettifyDump|prettifyDump(kotlin.collections.Map,kotlin.collections.Map>){}[0] } } abstract interface com.apollographql.cache.normalized.api/ReadOnlyNormalizedCache { // com.apollographql.cache.normalized.api/ReadOnlyNormalizedCache|null[0] - abstract fun dump(): kotlin.collections/Map, kotlin.collections/Map> // com.apollographql.cache.normalized.api/ReadOnlyNormalizedCache.dump|dump(){}[0] - abstract fun loadRecord(kotlin/String, com.apollographql.cache.normalized.api/CacheHeaders): com.apollographql.cache.normalized.api/Record? // com.apollographql.cache.normalized.api/ReadOnlyNormalizedCache.loadRecord|loadRecord(kotlin.String;com.apollographql.cache.normalized.api.CacheHeaders){}[0] - abstract fun loadRecords(kotlin.collections/Collection, com.apollographql.cache.normalized.api/CacheHeaders): kotlin.collections/Collection // com.apollographql.cache.normalized.api/ReadOnlyNormalizedCache.loadRecords|loadRecords(kotlin.collections.Collection;com.apollographql.cache.normalized.api.CacheHeaders){}[0] + abstract fun dump(): kotlin.collections/Map, kotlin.collections/Map> // com.apollographql.cache.normalized.api/ReadOnlyNormalizedCache.dump|dump(){}[0] + abstract fun loadRecord(com.apollographql.cache.normalized.api/CacheKey, com.apollographql.cache.normalized.api/CacheHeaders): com.apollographql.cache.normalized.api/Record? 
// com.apollographql.cache.normalized.api/ReadOnlyNormalizedCache.loadRecord|loadRecord(com.apollographql.cache.normalized.api.CacheKey;com.apollographql.cache.normalized.api.CacheHeaders){}[0] + abstract fun loadRecords(kotlin.collections/Collection, com.apollographql.cache.normalized.api/CacheHeaders): kotlin.collections/Collection // com.apollographql.cache.normalized.api/ReadOnlyNormalizedCache.loadRecords|loadRecords(kotlin.collections.Collection;com.apollographql.cache.normalized.api.CacheHeaders){}[0] } abstract interface com.apollographql.cache.normalized.api/RecordMerger { // com.apollographql.cache.normalized.api/RecordMerger|null[0] @@ -80,7 +79,7 @@ abstract interface com.apollographql.cache.normalized/ApolloStore { // com.apoll abstract val changedKeys // com.apollographql.cache.normalized/ApolloStore.changedKeys|{}changedKeys[0] abstract fun (): kotlinx.coroutines.flow/SharedFlow> // com.apollographql.cache.normalized/ApolloStore.changedKeys.|(){}[0] - abstract fun <#A1: com.apollographql.apollo.api/Executable.Data> normalize(com.apollographql.apollo.api/Executable<#A1>, kotlin.collections/Map, kotlin/String = ..., com.apollographql.apollo.api/CustomScalarAdapters = ...): kotlin.collections/Map // com.apollographql.cache.normalized/ApolloStore.normalize|normalize(com.apollographql.apollo.api.Executable<0:0>;kotlin.collections.Map;kotlin.String;com.apollographql.apollo.api.CustomScalarAdapters){0§}[0] + abstract fun <#A1: com.apollographql.apollo.api/Executable.Data> normalize(com.apollographql.apollo.api/Executable<#A1>, kotlin.collections/Map, kotlin/String = ..., com.apollographql.apollo.api/CustomScalarAdapters = ...): kotlin.collections/Map // com.apollographql.cache.normalized/ApolloStore.normalize|normalize(com.apollographql.apollo.api.Executable<0:0>;kotlin.collections.Map;kotlin.String;com.apollographql.apollo.api.CustomScalarAdapters){0§}[0] abstract fun <#A1: com.apollographql.apollo.api/Fragment.Data> readFragment(com.apollographql.apollo.api/Fragment<#A1>, com.apollographql.cache.normalized.api/CacheKey, com.apollographql.apollo.api/CustomScalarAdapters = ..., com.apollographql.cache.normalized.api/CacheHeaders = ...): com.apollographql.cache.normalized/ApolloStore.ReadResult<#A1> // com.apollographql.cache.normalized/ApolloStore.readFragment|readFragment(com.apollographql.apollo.api.Fragment<0:0>;com.apollographql.cache.normalized.api.CacheKey;com.apollographql.apollo.api.CustomScalarAdapters;com.apollographql.cache.normalized.api.CacheHeaders){0§}[0] abstract fun <#A1: com.apollographql.apollo.api/Fragment.Data> writeFragment(com.apollographql.apollo.api/Fragment<#A1>, com.apollographql.cache.normalized.api/CacheKey, #A1, com.apollographql.apollo.api/CustomScalarAdapters = ..., com.apollographql.cache.normalized.api/CacheHeaders = ...): kotlin.collections/Set // com.apollographql.cache.normalized/ApolloStore.writeFragment|writeFragment(com.apollographql.apollo.api.Fragment<0:0>;com.apollographql.cache.normalized.api.CacheKey;0:0;com.apollographql.apollo.api.CustomScalarAdapters;com.apollographql.cache.normalized.api.CacheHeaders){0§}[0] abstract fun <#A1: com.apollographql.apollo.api/Fragment.Data> writeOptimisticUpdates(com.apollographql.apollo.api/Fragment<#A1>, com.apollographql.cache.normalized.api/CacheKey, #A1, com.benasher44.uuid/Uuid, com.apollographql.apollo.api/CustomScalarAdapters = ...): kotlin.collections/Set // 
com.apollographql.cache.normalized/ApolloStore.writeOptimisticUpdates|writeOptimisticUpdates(com.apollographql.apollo.api.Fragment<0:0>;com.apollographql.cache.normalized.api.CacheKey;0:0;com.benasher44.uuid.Uuid;com.apollographql.apollo.api.CustomScalarAdapters){0§}[0] @@ -91,7 +90,7 @@ abstract interface com.apollographql.cache.normalized/ApolloStore { // com.apoll abstract fun <#A1: kotlin/Any?> accessCache(kotlin/Function1): #A1 // com.apollographql.cache.normalized/ApolloStore.accessCache|accessCache(kotlin.Function1){0§}[0] abstract fun clearAll(): kotlin/Boolean // com.apollographql.cache.normalized/ApolloStore.clearAll|clearAll(){}[0] abstract fun dispose() // com.apollographql.cache.normalized/ApolloStore.dispose|dispose(){}[0] - abstract fun dump(): kotlin.collections/Map, kotlin.collections/Map> // com.apollographql.cache.normalized/ApolloStore.dump|dump(){}[0] + abstract fun dump(): kotlin.collections/Map, kotlin.collections/Map> // com.apollographql.cache.normalized/ApolloStore.dump|dump(){}[0] abstract fun remove(com.apollographql.cache.normalized.api/CacheKey, kotlin/Boolean = ...): kotlin/Boolean // com.apollographql.cache.normalized/ApolloStore.remove|remove(com.apollographql.cache.normalized.api.CacheKey;kotlin.Boolean){}[0] abstract fun remove(kotlin.collections/List, kotlin/Boolean = ...): kotlin/Int // com.apollographql.cache.normalized/ApolloStore.remove|remove(kotlin.collections.List;kotlin.Boolean){}[0] abstract fun rollbackOptimisticUpdates(com.benasher44.uuid/Uuid): kotlin.collections/Set // com.apollographql.cache.normalized/ApolloStore.rollbackOptimisticUpdates|rollbackOptimisticUpdates(com.benasher44.uuid.Uuid){}[0] @@ -170,26 +169,6 @@ final class com.apollographql.cache.normalized.api/CacheHeaders { // com.apollog } } -final class com.apollographql.cache.normalized.api/CacheKey { // com.apollographql.cache.normalized.api/CacheKey|null[0] - constructor (kotlin/String, kotlin.collections/List) // com.apollographql.cache.normalized.api/CacheKey.|(kotlin.String;kotlin.collections.List){}[0] - constructor (kotlin/String, kotlin/Array...) // com.apollographql.cache.normalized.api/CacheKey.|(kotlin.String;kotlin.Array...){}[0] - constructor (kotlin/String, kotlin/Boolean = ...) 
// com.apollographql.cache.normalized.api/CacheKey.|(kotlin.String;kotlin.Boolean){}[0] - - final val key // com.apollographql.cache.normalized.api/CacheKey.key|{}key[0] - final fun (): kotlin/String // com.apollographql.cache.normalized.api/CacheKey.key.|(){}[0] - - final fun equals(kotlin/Any?): kotlin/Boolean // com.apollographql.cache.normalized.api/CacheKey.equals|equals(kotlin.Any?){}[0] - final fun hashCode(): kotlin/Int // com.apollographql.cache.normalized.api/CacheKey.hashCode|hashCode(){}[0] - final fun serialize(): kotlin/String // com.apollographql.cache.normalized.api/CacheKey.serialize|serialize(){}[0] - final fun toString(): kotlin/String // com.apollographql.cache.normalized.api/CacheKey.toString|toString(){}[0] - - final object Companion { // com.apollographql.cache.normalized.api/CacheKey.Companion|null[0] - final fun canDeserialize(kotlin/String): kotlin/Boolean // com.apollographql.cache.normalized.api/CacheKey.Companion.canDeserialize|canDeserialize(kotlin.String){}[0] - final fun deserialize(kotlin/String): com.apollographql.cache.normalized.api/CacheKey // com.apollographql.cache.normalized.api/CacheKey.Companion.deserialize|deserialize(kotlin.String){}[0] - final fun rootKey(): com.apollographql.cache.normalized.api/CacheKey // com.apollographql.cache.normalized.api/CacheKey.Companion.rootKey|rootKey(){}[0] - } -} - final class com.apollographql.cache.normalized.api/CacheKeyGeneratorContext { // com.apollographql.cache.normalized.api/CacheKeyGeneratorContext|null[0] constructor (com.apollographql.apollo.api/CompiledField, com.apollographql.apollo.api/Executable.Variables) // com.apollographql.cache.normalized.api/CacheKeyGeneratorContext.|(com.apollographql.apollo.api.CompiledField;com.apollographql.apollo.api.Executable.Variables){}[0] @@ -315,14 +294,14 @@ final class com.apollographql.cache.normalized.api/MetadataGeneratorContext { // } final class com.apollographql.cache.normalized.api/Record : kotlin.collections/Map { // com.apollographql.cache.normalized.api/Record|null[0] - constructor (kotlin/String, kotlin.collections/Map, com.benasher44.uuid/Uuid? = ..., kotlin.collections/Map> = ...) // com.apollographql.cache.normalized.api/Record.|(kotlin.String;kotlin.collections.Map;com.benasher44.uuid.Uuid?;kotlin.collections.Map>){}[0] + constructor (com.apollographql.cache.normalized.api/CacheKey, kotlin.collections/Map, com.benasher44.uuid/Uuid? = ..., kotlin.collections/Map> = ...) 
// com.apollographql.cache.normalized.api/Record.|(com.apollographql.cache.normalized.api.CacheKey;kotlin.collections.Map;com.benasher44.uuid.Uuid?;kotlin.collections.Map>){}[0] final val entries // com.apollographql.cache.normalized.api/Record.entries|{}entries[0] final fun (): kotlin.collections/Set> // com.apollographql.cache.normalized.api/Record.entries.|(){}[0] final val fields // com.apollographql.cache.normalized.api/Record.fields|{}fields[0] final fun (): kotlin.collections/Map // com.apollographql.cache.normalized.api/Record.fields.|(){}[0] final val key // com.apollographql.cache.normalized.api/Record.key|{}key[0] - final fun (): kotlin/String // com.apollographql.cache.normalized.api/Record.key.|(){}[0] + final fun (): com.apollographql.cache.normalized.api/CacheKey // com.apollographql.cache.normalized.api/Record.key.|(){}[0] final val keys // com.apollographql.cache.normalized.api/Record.keys|{}keys[0] final fun (): kotlin.collections/Set // com.apollographql.cache.normalized.api/Record.keys.|(){}[0] final val metadata // com.apollographql.cache.normalized.api/Record.metadata|{}metadata[0] @@ -362,7 +341,7 @@ final class com.apollographql.cache.normalized.api/RecordMergerContext { // com. } final class com.apollographql.cache.normalized.api/ResolverContext { // com.apollographql.cache.normalized.api/ResolverContext|null[0] - constructor (com.apollographql.apollo.api/CompiledField, com.apollographql.apollo.api/Executable.Variables, kotlin.collections/Map, kotlin/String, kotlin/String, com.apollographql.cache.normalized.api/CacheHeaders, com.apollographql.cache.normalized.api/FieldKeyGenerator, kotlin.collections/List) // com.apollographql.cache.normalized.api/ResolverContext.|(com.apollographql.apollo.api.CompiledField;com.apollographql.apollo.api.Executable.Variables;kotlin.collections.Map;kotlin.String;kotlin.String;com.apollographql.cache.normalized.api.CacheHeaders;com.apollographql.cache.normalized.api.FieldKeyGenerator;kotlin.collections.List){}[0] + constructor (com.apollographql.apollo.api/CompiledField, com.apollographql.apollo.api/Executable.Variables, kotlin.collections/Map, com.apollographql.cache.normalized.api/CacheKey, kotlin/String, com.apollographql.cache.normalized.api/CacheHeaders, com.apollographql.cache.normalized.api/FieldKeyGenerator, kotlin.collections/List) // com.apollographql.cache.normalized.api/ResolverContext.|(com.apollographql.apollo.api.CompiledField;com.apollographql.apollo.api.Executable.Variables;kotlin.collections.Map;com.apollographql.cache.normalized.api.CacheKey;kotlin.String;com.apollographql.cache.normalized.api.CacheHeaders;com.apollographql.cache.normalized.api.FieldKeyGenerator;kotlin.collections.List){}[0] final val cacheHeaders // com.apollographql.cache.normalized.api/ResolverContext.cacheHeaders|{}cacheHeaders[0] final fun (): com.apollographql.cache.normalized.api/CacheHeaders // com.apollographql.cache.normalized.api/ResolverContext.cacheHeaders.|(){}[0] @@ -373,7 +352,7 @@ final class com.apollographql.cache.normalized.api/ResolverContext { // com.apol final val parent // com.apollographql.cache.normalized.api/ResolverContext.parent|{}parent[0] final fun (): kotlin.collections/Map // com.apollographql.cache.normalized.api/ResolverContext.parent.|(){}[0] final val parentKey // com.apollographql.cache.normalized.api/ResolverContext.parentKey|{}parentKey[0] - final fun (): kotlin/String // com.apollographql.cache.normalized.api/ResolverContext.parentKey.|(){}[0] + final fun (): com.apollographql.cache.normalized.api/CacheKey // 
com.apollographql.cache.normalized.api/ResolverContext.parentKey.|(){}[0] final val parentType // com.apollographql.cache.normalized.api/ResolverContext.parentType|{}parentType[0] final fun (): kotlin/String // com.apollographql.cache.normalized.api/ResolverContext.parentType.|(){}[0] final val path // com.apollographql.cache.normalized.api/ResolverContext.path|{}path[0] @@ -395,14 +374,13 @@ final class com.apollographql.cache.normalized.memory/MemoryCache : com.apollogr final fun (): kotlin/Int // com.apollographql.cache.normalized.memory/MemoryCache.size.|(){}[0] final fun clearAll() // com.apollographql.cache.normalized.memory/MemoryCache.clearAll|clearAll(){}[0] - final fun dump(): kotlin.collections/Map, kotlin.collections/Map> // com.apollographql.cache.normalized.memory/MemoryCache.dump|dump(){}[0] - final fun loadRecord(kotlin/String, com.apollographql.cache.normalized.api/CacheHeaders): com.apollographql.cache.normalized.api/Record? // com.apollographql.cache.normalized.memory/MemoryCache.loadRecord|loadRecord(kotlin.String;com.apollographql.cache.normalized.api.CacheHeaders){}[0] - final fun loadRecords(kotlin.collections/Collection, com.apollographql.cache.normalized.api/CacheHeaders): kotlin.collections/Collection // com.apollographql.cache.normalized.memory/MemoryCache.loadRecords|loadRecords(kotlin.collections.Collection;com.apollographql.cache.normalized.api.CacheHeaders){}[0] + final fun dump(): kotlin.collections/Map, kotlin.collections/Map> // com.apollographql.cache.normalized.memory/MemoryCache.dump|dump(){}[0] + final fun loadRecord(com.apollographql.cache.normalized.api/CacheKey, com.apollographql.cache.normalized.api/CacheHeaders): com.apollographql.cache.normalized.api/Record? // com.apollographql.cache.normalized.memory/MemoryCache.loadRecord|loadRecord(com.apollographql.cache.normalized.api.CacheKey;com.apollographql.cache.normalized.api.CacheHeaders){}[0] + final fun loadRecords(kotlin.collections/Collection, com.apollographql.cache.normalized.api/CacheHeaders): kotlin.collections/Collection // com.apollographql.cache.normalized.memory/MemoryCache.loadRecords|loadRecords(kotlin.collections.Collection;com.apollographql.cache.normalized.api.CacheHeaders){}[0] final fun merge(com.apollographql.cache.normalized.api/Record, com.apollographql.cache.normalized.api/CacheHeaders, com.apollographql.cache.normalized.api/RecordMerger): kotlin.collections/Set // com.apollographql.cache.normalized.memory/MemoryCache.merge|merge(com.apollographql.cache.normalized.api.Record;com.apollographql.cache.normalized.api.CacheHeaders;com.apollographql.cache.normalized.api.RecordMerger){}[0] final fun merge(kotlin.collections/Collection, com.apollographql.cache.normalized.api/CacheHeaders, com.apollographql.cache.normalized.api/RecordMerger): kotlin.collections/Set // com.apollographql.cache.normalized.memory/MemoryCache.merge|merge(kotlin.collections.Collection;com.apollographql.cache.normalized.api.CacheHeaders;com.apollographql.cache.normalized.api.RecordMerger){}[0] final fun remove(com.apollographql.cache.normalized.api/CacheKey, kotlin/Boolean): kotlin/Boolean // com.apollographql.cache.normalized.memory/MemoryCache.remove|remove(com.apollographql.cache.normalized.api.CacheKey;kotlin.Boolean){}[0] final fun remove(kotlin.collections/Collection, kotlin/Boolean): kotlin/Int // com.apollographql.cache.normalized.memory/MemoryCache.remove|remove(kotlin.collections.Collection;kotlin.Boolean){}[0] - final fun remove(kotlin/String): kotlin/Int // 
com.apollographql.cache.normalized.memory/MemoryCache.remove|remove(kotlin.String){}[0] } final class com.apollographql.cache.normalized.memory/MemoryCacheFactory : com.apollographql.cache.normalized.api/NormalizedCacheFactory { // com.apollographql.cache.normalized.memory/MemoryCacheFactory|null[0] @@ -480,6 +458,26 @@ final class com.apollographql.cache.normalized/RemovedFieldsAndRecords { // com. final fun (): kotlin.collections/Set // com.apollographql.cache.normalized/RemovedFieldsAndRecords.removedRecords.|(){}[0] } +final value class com.apollographql.cache.normalized.api/CacheKey { // com.apollographql.cache.normalized.api/CacheKey|null[0] + constructor (kotlin/String, kotlin.collections/List) // com.apollographql.cache.normalized.api/CacheKey.|(kotlin.String;kotlin.collections.List){}[0] + constructor (kotlin/String, kotlin/Array...) // com.apollographql.cache.normalized.api/CacheKey.|(kotlin.String;kotlin.Array...){}[0] + constructor (kotlin/String, kotlin/Boolean = ...) // com.apollographql.cache.normalized.api/CacheKey.|(kotlin.String;kotlin.Boolean){}[0] + + final val key // com.apollographql.cache.normalized.api/CacheKey.key|{}key[0] + final fun (): kotlin/String // com.apollographql.cache.normalized.api/CacheKey.key.|(){}[0] + + final fun equals(kotlin/Any?): kotlin/Boolean // com.apollographql.cache.normalized.api/CacheKey.equals|equals(kotlin.Any?){}[0] + final fun hashCode(): kotlin/Int // com.apollographql.cache.normalized.api/CacheKey.hashCode|hashCode(){}[0] + final fun serialize(): kotlin/String // com.apollographql.cache.normalized.api/CacheKey.serialize|serialize(){}[0] + final fun toString(): kotlin/String // com.apollographql.cache.normalized.api/CacheKey.toString|toString(){}[0] + + final object Companion { // com.apollographql.cache.normalized.api/CacheKey.Companion|null[0] + final fun canDeserialize(kotlin/String): kotlin/Boolean // com.apollographql.cache.normalized.api/CacheKey.Companion.canDeserialize|canDeserialize(kotlin.String){}[0] + final fun deserialize(kotlin/String): com.apollographql.cache.normalized.api/CacheKey // com.apollographql.cache.normalized.api/CacheKey.Companion.deserialize|deserialize(kotlin.String){}[0] + final fun rootKey(): com.apollographql.cache.normalized.api/CacheKey // com.apollographql.cache.normalized.api/CacheKey.Companion.rootKey|rootKey(){}[0] + } +} + final object com.apollographql.cache.normalized.api/ApolloCacheHeaders { // com.apollographql.cache.normalized.api/ApolloCacheHeaders|null[0] final const val DO_NOT_STORE // com.apollographql.cache.normalized.api/ApolloCacheHeaders.DO_NOT_STORE|{}DO_NOT_STORE[0] final fun (): kotlin/String // com.apollographql.cache.normalized.api/ApolloCacheHeaders.DO_NOT_STORE.|(){}[0] @@ -558,7 +556,7 @@ final val com.apollographql.cache.normalized/isFromCache // com.apollographql.ca final fun (com.apollographql.apollo/ApolloClient.Builder).com.apollographql.cache.normalized/logCacheMisses(kotlin/Function1 = ...): com.apollographql.apollo/ApolloClient.Builder // com.apollographql.cache.normalized/logCacheMisses|logCacheMisses@com.apollographql.apollo.ApolloClient.Builder(kotlin.Function1){}[0] final fun (com.apollographql.apollo/ApolloClient.Builder).com.apollographql.cache.normalized/normalizedCache(com.apollographql.cache.normalized.api/NormalizedCacheFactory, com.apollographql.cache.normalized.api/CacheKeyGenerator = ..., com.apollographql.cache.normalized.api/MetadataGenerator = ..., com.apollographql.cache.normalized.api/CacheResolver = ..., 
com.apollographql.cache.normalized.api/RecordMerger = ..., com.apollographql.cache.normalized.api/FieldKeyGenerator = ..., com.apollographql.cache.normalized.api/EmbeddedFieldsProvider = ..., kotlin/Boolean = ...): com.apollographql.apollo/ApolloClient.Builder // com.apollographql.cache.normalized/normalizedCache|normalizedCache@com.apollographql.apollo.ApolloClient.Builder(com.apollographql.cache.normalized.api.NormalizedCacheFactory;com.apollographql.cache.normalized.api.CacheKeyGenerator;com.apollographql.cache.normalized.api.MetadataGenerator;com.apollographql.cache.normalized.api.CacheResolver;com.apollographql.cache.normalized.api.RecordMerger;com.apollographql.cache.normalized.api.FieldKeyGenerator;com.apollographql.cache.normalized.api.EmbeddedFieldsProvider;kotlin.Boolean){}[0] final fun (com.apollographql.apollo/ApolloClient.Builder).com.apollographql.cache.normalized/store(com.apollographql.cache.normalized/ApolloStore, kotlin/Boolean = ...): com.apollographql.apollo/ApolloClient.Builder // com.apollographql.cache.normalized/store|store@com.apollographql.apollo.ApolloClient.Builder(com.apollographql.cache.normalized.ApolloStore;kotlin.Boolean){}[0] -final fun (com.apollographql.cache.normalized.api/NormalizedCache).com.apollographql.cache.normalized/allRecords(): kotlin.collections/Map // com.apollographql.cache.normalized/allRecords|allRecords@com.apollographql.cache.normalized.api.NormalizedCache(){}[0] +final fun (com.apollographql.cache.normalized.api/NormalizedCache).com.apollographql.cache.normalized/allRecords(): kotlin.collections/Map // com.apollographql.cache.normalized/allRecords|allRecords@com.apollographql.cache.normalized.api.NormalizedCache(){}[0] final fun (com.apollographql.cache.normalized.api/NormalizedCache).com.apollographql.cache.normalized/garbageCollect(com.apollographql.cache.normalized.api/MaxAgeProvider, kotlin.time/Duration = ...): com.apollographql.cache.normalized/GarbageCollectResult // com.apollographql.cache.normalized/garbageCollect|garbageCollect@com.apollographql.cache.normalized.api.NormalizedCache(com.apollographql.cache.normalized.api.MaxAgeProvider;kotlin.time.Duration){}[0] final fun (com.apollographql.cache.normalized.api/NormalizedCache).com.apollographql.cache.normalized/removeDanglingReferences(): com.apollographql.cache.normalized/RemovedFieldsAndRecords // com.apollographql.cache.normalized/removeDanglingReferences|removeDanglingReferences@com.apollographql.cache.normalized.api.NormalizedCache(){}[0] final fun (com.apollographql.cache.normalized.api/NormalizedCache).com.apollographql.cache.normalized/removeStaleFields(com.apollographql.cache.normalized.api/MaxAgeProvider, kotlin.time/Duration = ...): com.apollographql.cache.normalized/RemovedFieldsAndRecords // com.apollographql.cache.normalized/removeStaleFields|removeStaleFields@com.apollographql.cache.normalized.api.NormalizedCache(com.apollographql.cache.normalized.api.MaxAgeProvider;kotlin.time.Duration){}[0] @@ -572,11 +570,11 @@ final fun (com.apollographql.cache.normalized/ApolloStore).com.apollographql.cac final fun (com.apollographql.cache.normalized/ApolloStore).com.apollographql.cache.normalized/removeStaleFields(com.apollographql.cache.normalized.api/MaxAgeProvider, kotlin.time/Duration = ...): com.apollographql.cache.normalized/RemovedFieldsAndRecords // com.apollographql.cache.normalized/removeStaleFields|removeStaleFields@com.apollographql.cache.normalized.ApolloStore(com.apollographql.cache.normalized.api.MaxAgeProvider;kotlin.time.Duration){}[0] final fun 
(com.apollographql.cache.normalized/ApolloStore).com.apollographql.cache.normalized/removeUnreachableRecords(): kotlin.collections/Set // com.apollographql.cache.normalized/removeUnreachableRecords|removeUnreachableRecords@com.apollographql.cache.normalized.ApolloStore(){}[0] final fun (kotlin.collections/Collection?).com.apollographql.cache.normalized.api/dependentKeys(): kotlin.collections/Set // com.apollographql.cache.normalized.api/dependentKeys|dependentKeys@kotlin.collections.Collection?(){}[0] -final fun (kotlin.collections/Map).com.apollographql.cache.normalized/getReachableCacheKeys(): kotlin.collections/Set // com.apollographql.cache.normalized/getReachableCacheKeys|getReachableCacheKeys@kotlin.collections.Map(){}[0] +final fun (kotlin.collections/Map).com.apollographql.cache.normalized/getReachableCacheKeys(): kotlin.collections/Set // com.apollographql.cache.normalized/getReachableCacheKeys|getReachableCacheKeys@kotlin.collections.Map(){}[0] final fun (kotlin/String).com.apollographql.cache.normalized.internal/hashed(): kotlin/String // com.apollographql.cache.normalized.internal/hashed|hashed@kotlin.String(){}[0] final fun <#A: com.apollographql.apollo.api/Executable.Data> (#A).com.apollographql.cache.normalized.api/withErrors(com.apollographql.apollo.api/Executable<#A>, kotlin.collections/List?, com.apollographql.apollo.api/CustomScalarAdapters = ...): kotlin.collections/Map // com.apollographql.cache.normalized.api/withErrors|withErrors@0:0(com.apollographql.apollo.api.Executable<0:0>;kotlin.collections.List?;com.apollographql.apollo.api.CustomScalarAdapters){0§}[0] -final fun <#A: com.apollographql.apollo.api/Executable.Data> (#A).com.apollographql.cache.normalized.internal/normalized(com.apollographql.apollo.api/Executable<#A>, kotlin/String = ..., com.apollographql.apollo.api/CustomScalarAdapters = ..., com.apollographql.cache.normalized.api/CacheKeyGenerator = ..., com.apollographql.cache.normalized.api/MetadataGenerator = ..., com.apollographql.cache.normalized.api/FieldKeyGenerator = ..., com.apollographql.cache.normalized.api/EmbeddedFieldsProvider = ...): kotlin.collections/Map // com.apollographql.cache.normalized.internal/normalized|normalized@0:0(com.apollographql.apollo.api.Executable<0:0>;kotlin.String;com.apollographql.apollo.api.CustomScalarAdapters;com.apollographql.cache.normalized.api.CacheKeyGenerator;com.apollographql.cache.normalized.api.MetadataGenerator;com.apollographql.cache.normalized.api.FieldKeyGenerator;com.apollographql.cache.normalized.api.EmbeddedFieldsProvider){0§}[0] -final fun <#A: com.apollographql.apollo.api/Executable.Data> (kotlin.collections/Map).com.apollographql.cache.normalized.internal/normalized(com.apollographql.apollo.api/Executable<#A>, kotlin/String = ..., com.apollographql.apollo.api/CustomScalarAdapters = ..., com.apollographql.cache.normalized.api/CacheKeyGenerator = ..., com.apollographql.cache.normalized.api/MetadataGenerator = ..., com.apollographql.cache.normalized.api/FieldKeyGenerator = ..., com.apollographql.cache.normalized.api/EmbeddedFieldsProvider = ...): kotlin.collections/Map // com.apollographql.cache.normalized.internal/normalized|normalized@kotlin.collections.Map(com.apollographql.apollo.api.Executable<0:0>;kotlin.String;com.apollographql.apollo.api.CustomScalarAdapters;com.apollographql.cache.normalized.api.CacheKeyGenerator;com.apollographql.cache.normalized.api.MetadataGenerator;com.apollographql.cache.normalized.api.FieldKeyGenerator;com.apollographql.cache.normalized.api.EmbeddedFieldsProvider){0§}[0] 
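A minimal sketch of the CacheKey-keyed flow that the declarations above describe, assuming a hypothetical generated operation `MyQuery`; `normalized()` and `getReachableCacheKeys()` are the `@ApolloInternal` helpers listed in this dump, so this is illustrative only:

import com.apollographql.cache.normalized.api.CacheKey
import com.apollographql.cache.normalized.api.Record
import com.apollographql.cache.normalized.getReachableCacheKeys
import com.apollographql.cache.normalized.internal.normalized

fun collectGarbageCandidates(data: MyQuery.Data): Set<CacheKey> {
  // Normalization now returns records keyed by hashed CacheKeys instead of raw Strings.
  val records: Map<CacheKey, Record> = data.normalized(MyQuery())
  // Walk CacheKey references starting from the root record.
  val reachable: Set<CacheKey> = records.getReachableCacheKeys()
  // Records whose keys are not reachable are garbage-collection candidates.
  return records.keys - reachable
}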
+final fun <#A: com.apollographql.apollo.api/Executable.Data> (#A).com.apollographql.cache.normalized.internal/normalized(com.apollographql.apollo.api/Executable<#A>, kotlin/String = ..., com.apollographql.apollo.api/CustomScalarAdapters = ..., com.apollographql.cache.normalized.api/CacheKeyGenerator = ..., com.apollographql.cache.normalized.api/MetadataGenerator = ..., com.apollographql.cache.normalized.api/FieldKeyGenerator = ..., com.apollographql.cache.normalized.api/EmbeddedFieldsProvider = ...): kotlin.collections/Map // com.apollographql.cache.normalized.internal/normalized|normalized@0:0(com.apollographql.apollo.api.Executable<0:0>;kotlin.String;com.apollographql.apollo.api.CustomScalarAdapters;com.apollographql.cache.normalized.api.CacheKeyGenerator;com.apollographql.cache.normalized.api.MetadataGenerator;com.apollographql.cache.normalized.api.FieldKeyGenerator;com.apollographql.cache.normalized.api.EmbeddedFieldsProvider){0§}[0] +final fun <#A: com.apollographql.apollo.api/Executable.Data> (kotlin.collections/Map).com.apollographql.cache.normalized.internal/normalized(com.apollographql.apollo.api/Executable<#A>, kotlin/String = ..., com.apollographql.apollo.api/CustomScalarAdapters = ..., com.apollographql.cache.normalized.api/CacheKeyGenerator = ..., com.apollographql.cache.normalized.api/MetadataGenerator = ..., com.apollographql.cache.normalized.api/FieldKeyGenerator = ..., com.apollographql.cache.normalized.api/EmbeddedFieldsProvider = ...): kotlin.collections/Map // com.apollographql.cache.normalized.internal/normalized|normalized@kotlin.collections.Map(com.apollographql.apollo.api.Executable<0:0>;kotlin.String;com.apollographql.apollo.api.CustomScalarAdapters;com.apollographql.cache.normalized.api.CacheKeyGenerator;com.apollographql.cache.normalized.api.MetadataGenerator;com.apollographql.cache.normalized.api.FieldKeyGenerator;com.apollographql.cache.normalized.api.EmbeddedFieldsProvider){0§}[0] final fun <#A: com.apollographql.apollo.api/Mutation.Data> (com.apollographql.apollo.api/ApolloRequest.Builder<#A>).com.apollographql.cache.normalized/optimisticUpdates(#A): com.apollographql.apollo.api/ApolloRequest.Builder<#A> // com.apollographql.cache.normalized/optimisticUpdates|optimisticUpdates@com.apollographql.apollo.api.ApolloRequest.Builder<0:0>(0:0){0§}[0] final fun <#A: com.apollographql.apollo.api/Mutation.Data> (com.apollographql.apollo/ApolloCall<#A>).com.apollographql.cache.normalized/optimisticUpdates(#A): com.apollographql.apollo/ApolloCall<#A> // com.apollographql.cache.normalized/optimisticUpdates|optimisticUpdates@com.apollographql.apollo.ApolloCall<0:0>(0:0){0§}[0] final fun <#A: com.apollographql.apollo.api/Operation.Data> (com.apollographql.apollo.api/ApolloRequest.Builder<#A>).com.apollographql.cache.normalized/fetchFromCache(kotlin/Boolean): com.apollographql.apollo.api/ApolloRequest.Builder<#A> // com.apollographql.cache.normalized/fetchFromCache|fetchFromCache@com.apollographql.apollo.api.ApolloRequest.Builder<0:0>(kotlin.Boolean){0§}[0] diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/ApolloStore.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/ApolloStore.kt index 8086c22d..d1437da5 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/ApolloStore.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/ApolloStore.kt @@ -246,7 +246,7 @@ interface ApolloStore { dataWithErrors: 
DataWithErrors, rootKey: String = CacheKey.rootKey().key, customScalarAdapters: CustomScalarAdapters = CustomScalarAdapters.Empty, - ): Map + ): Map /** * Publishes a set of keys that have changed. This will notify subscribers of [changedKeys]. @@ -273,7 +273,7 @@ interface ApolloStore { * * This is a synchronous operation that might block if the underlying cache is doing IO. */ - fun dump(): Map, Map> + fun dump(): Map, Map> /** * Releases resources associated with this store. @@ -312,11 +312,13 @@ internal interface ApolloStoreInterceptor : ApolloInterceptor internal fun ApolloStore.cacheDumpProvider(): () -> Map>>> { return { dump().map { (cacheClass, cacheRecords) -> - cacheClass.normalizedCacheName() to cacheRecords.mapValues { (_, record) -> - record.size to record.fields.mapValues { (_, value) -> - value.toExternal() - } - } + cacheClass.normalizedCacheName() to cacheRecords + .mapKeys { (key, _) -> key.key } + .mapValues { (_, record) -> + record.size to record.fields.mapValues { (_, value) -> + value.toExternal() + } + } }.toMap() } } diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/GarbageCollection.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/GarbageCollection.kt index 4cf0acc3..fb9f5b69 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/GarbageCollection.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/GarbageCollection.kt @@ -11,16 +11,17 @@ import com.apollographql.cache.normalized.api.NormalizedCache import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.api.RecordValue import com.apollographql.cache.normalized.api.expirationDate +import com.apollographql.cache.normalized.api.fieldKey import com.apollographql.cache.normalized.api.receivedDate import kotlin.time.Duration @ApolloInternal -fun Map.getReachableCacheKeys(): Set { - fun Map.getReachableCacheKeys(roots: List, reachableCacheKeys: MutableSet) { - val records = roots.mapNotNull { this[it.key] } +fun Map.getReachableCacheKeys(): Set { + fun Map.getReachableCacheKeys(roots: List, reachableCacheKeys: MutableSet) { + val records = roots.mapNotNull { this[it] } val cacheKeysToCheck = mutableListOf() for (record in records) { - reachableCacheKeys.add(CacheKey(record.key)) + reachableCacheKeys.add(record.key) cacheKeysToCheck.addAll(record.referencedFields() - reachableCacheKeys) } if (cacheKeysToCheck.isNotEmpty()) { @@ -34,7 +35,7 @@ fun Map.getReachableCacheKeys(): Set { } @ApolloInternal -fun NormalizedCache.allRecords(): Map { +fun NormalizedCache.allRecords(): Map { return dump().values.fold(emptyMap()) { acc, map -> acc + map } } @@ -49,8 +50,8 @@ fun NormalizedCache.removeUnreachableRecords(): Set { return removeUnreachableRecords(allRecords) } -private fun NormalizedCache.removeUnreachableRecords(allRecords: Map): Set { - val unreachableCacheKeys = allRecords.keys.map { CacheKey(it) } - allRecords.getReachableCacheKeys() +private fun NormalizedCache.removeUnreachableRecords(allRecords: Map): Set { + val unreachableCacheKeys = allRecords.keys - allRecords.getReachableCacheKeys() remove(unreachableCacheKeys, cascade = false) return unreachableCacheKeys.toSet() } @@ -89,11 +90,11 @@ fun NormalizedCache.removeStaleFields( } private fun NormalizedCache.removeStaleFields( - allRecords: MutableMap, + allRecords: MutableMap, maxAgeProvider: MaxAgeProvider, maxStale: Duration, ): RemovedFieldsAndRecords 
{ - val recordsToUpdate = mutableMapOf() + val recordsToUpdate = mutableMapOf() val removedFields = mutableSetOf() for (record in allRecords.values.toList()) { var recordCopy = record @@ -115,7 +116,7 @@ private fun NormalizedCache.removeStaleFields( if (staleDuration >= maxStale.inWholeSeconds) { recordCopy -= field.key recordsToUpdate[record.key] = recordCopy - removedFields.add(record.key + "." + field.key) + removedFields.add(record.key.fieldKey((field.key))) if (recordCopy.isEmptyRecord()) { allRecords.remove(record.key) } else { @@ -133,7 +134,7 @@ private fun NormalizedCache.removeStaleFields( if (staleDuration >= maxStale.inWholeSeconds) { recordCopy -= field.key recordsToUpdate[record.key] = recordCopy - removedFields.add(record.key + "." + field.key) + removedFields.add(record.key.fieldKey(field.key)) if (recordCopy.isEmptyRecord()) { allRecords.remove(record.key) } else { @@ -144,7 +145,7 @@ private fun NormalizedCache.removeStaleFields( } } if (recordsToUpdate.isNotEmpty()) { - remove(recordsToUpdate.keys.map { CacheKey(it) }, cascade = false) + remove(recordsToUpdate.keys, cascade = false) val emptyRecords = recordsToUpdate.values.filter { it.isEmptyRecord() }.toSet() val nonEmptyRecords = recordsToUpdate.values - emptyRecords if (nonEmptyRecords.isNotEmpty()) { @@ -152,7 +153,7 @@ private fun NormalizedCache.removeStaleFields( } return RemovedFieldsAndRecords( removedFields = removedFields, - removedRecords = emptyRecords.map { CacheKey(it.key) }.toSet() + removedRecords = emptyRecords.map { it.key }.toSet() ) } return RemovedFieldsAndRecords(removedFields = emptySet(), removedRecords = emptySet()) @@ -182,12 +183,12 @@ fun ApolloStore.removeStaleFields( * @return the fields and records that were removed. */ fun NormalizedCache.removeDanglingReferences(): RemovedFieldsAndRecords { - val allRecords: MutableMap = allRecords().toMutableMap() + val allRecords: MutableMap = allRecords().toMutableMap() return removeDanglingReferences(allRecords) } -private fun NormalizedCache.removeDanglingReferences(allRecords: MutableMap): RemovedFieldsAndRecords { - val recordsToUpdate = mutableMapOf() +private fun NormalizedCache.removeDanglingReferences(allRecords: MutableMap): RemovedFieldsAndRecords { + val recordsToUpdate = mutableMapOf() val allRemovedFields = mutableSetOf() do { val removedFields = mutableSetOf() @@ -197,7 +198,7 @@ private fun NormalizedCache.removeDanglingReferences(allRecords: MutableMap): Boolean { +private fun RecordValue.isDanglingReference(allRecords: Map): Boolean { return when (this) { - is CacheKey -> allRecords[this.key] == null + is CacheKey -> allRecords[this] == null is List<*> -> any { it.isDanglingReference(allRecords) } is Map<*, *> -> values.any { it.isDanglingReference(allRecords) } else -> false @@ -244,7 +245,7 @@ private fun RecordValue.isDanglingReference(allRecords: Map): Bo private fun Record.isEmptyRecord() = fields.isEmpty() || fields.size == 1 && fields.keys.first() == "__typename" -private fun RecordValue.guessType(allRecords: Map): String { +private fun RecordValue.guessType(allRecords: Map): String { return when (this) { is List<*> -> { val first = firstOrNull() ?: return "" @@ -252,7 +253,7 @@ private fun RecordValue.guessType(allRecords: Map): String { } is CacheKey -> { - allRecords[key]?.get("__typename") as? String ?: "" + allRecords[this]?.get("__typename") as? 
String ?: "" } else -> { diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt index d5e0480b..0f596aa2 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt @@ -1,17 +1,32 @@ package com.apollographql.cache.normalized.api import com.apollographql.cache.normalized.internal.hashed +import kotlin.jvm.JvmInline import kotlin.jvm.JvmStatic /** * A [CacheKey] identifies an object in the cache. - * - * @param key The key of the object in the cache. The key must be globally unique. */ -class CacheKey( +@JvmInline +value class CacheKey private constructor( + /** + * The hashed key of the object in the cache. + */ val key: String, - internal val isHashed: Boolean = false, ) { + /** + * Builds a [CacheKey] from a key. + * + * @param key The key of the object in the cache. The key must be globally unique. + * @param isHashed If true, the key is already hashed. If false, the key will be hashed. + */ + constructor(key: String, isHashed: Boolean = false) : this( + if (isHashed || key == rootKey().key) { + key + } else { + key.hashed() + } + ) /** * Builds a [CacheKey] from a typename and a list of Strings. @@ -25,7 +40,8 @@ class CacheKey( values.forEach { append(it) } - } + }, + isHashed = false, ) /** @@ -35,17 +51,8 @@ class CacheKey( */ constructor(typename: String, vararg values: String) : this(typename, values.toList()) - override fun hashCode() = key.hashCode() - override fun equals(other: Any?): Boolean { - return key == (other as? CacheKey)?.key - } - override fun toString() = "CacheKey($key)" - internal val hashedKey by lazy { - if (isHashed) key else key.hashed() - } - fun serialize(): String { return "$SERIALIZATION_TEMPLATE{$key}" } diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheResolver.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheResolver.kt index 153f8b09..57cd921b 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheResolver.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheResolver.kt @@ -100,7 +100,7 @@ class ResolverContext( /** * The key of the parent. Mainly used for debugging */ - val parentKey: String, + val parentKey: CacheKey, /** * The type of the parent @@ -135,7 +135,7 @@ object DefaultCacheResolver : CacheResolver { override fun resolveField(context: ResolverContext): Any? 
{ val fieldKey = context.getFieldKey() if (!context.parent.containsKey(fieldKey)) { - throw CacheMissException(context.parentKey, fieldKey) + throw CacheMissException(context.parentKey.key, fieldKey) } return context.parent[fieldKey] @@ -190,7 +190,7 @@ class CacheControlCacheResolver( val maxStale = context.cacheHeaders.headerValue(ApolloCacheHeaders.MAX_STALE)?.toLongOrNull() ?: 0L if (staleDuration >= maxStale) { throw CacheMissException( - key = context.parentKey, + key = context.parentKey.key, fieldName = context.getFieldKey(), stale = true ) @@ -206,7 +206,7 @@ class CacheControlCacheResolver( val maxStale = context.cacheHeaders.headerValue(ApolloCacheHeaders.MAX_STALE)?.toLongOrNull() ?: 0L if (staleDuration >= maxStale) { throw CacheMissException( - key = context.parentKey, + key = context.parentKey.key, fieldName = context.getFieldKey(), stale = true ) diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/NormalizedCache.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/NormalizedCache.kt index f2f79d28..c900b0b6 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/NormalizedCache.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/NormalizedCache.kt @@ -70,24 +70,9 @@ interface NormalizedCache : ReadOnlyNormalizedCache { */ fun remove(cacheKeys: Collection, cascade: Boolean): Int - /** - * Remove records whose key matches a given pattern from this cache and all chained caches - * - * @param pattern a pattern to filter the cache keys. 'pattern' is interpreted as in the LIKE operator of Sqlite. - * - '%' matches any sequence of zero or more characters - * - '_' matches any single character - * - The matching is case-insensitive - * - '\' is used as escape - * See https://sqlite.org/lang_expr.html for more details - * - * @return the number of records deleted accross all caches - */ - fun remove(pattern: String): Int - - companion object { @JvmStatic - fun prettifyDump(dump: Map<@JvmSuppressWildcards KClass<*>, Map>): String = dump.prettifyDump() + fun prettifyDump(dump: Map<@JvmSuppressWildcards KClass<*>, Map>): String = dump.prettifyDump() private fun Any?.prettifyDump(level: Int = 0): String { return buildString { @@ -128,6 +113,7 @@ interface NormalizedCache : ReadOnlyNormalizedCache { indent(level + 1) append(when (key) { is KClass<*> -> key.simpleName + is CacheKey -> key.key else -> key } ) diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/ReadOnlyNormalizedCache.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/ReadOnlyNormalizedCache.kt index 4291864d..5cbf8f00 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/ReadOnlyNormalizedCache.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/ReadOnlyNormalizedCache.kt @@ -9,7 +9,7 @@ interface ReadOnlyNormalizedCache { * @param cacheHeaders The cache headers associated with the request which generated this record. * @return The [Record] for key. If not present return null. */ - fun loadRecord(key: String, cacheHeaders: CacheHeaders): Record? + fun loadRecord(key: CacheKey, cacheHeaders: CacheHeaders): Record? /** * Calls through to [NormalizedCache.loadRecord]. 
Implementations should override this @@ -19,7 +19,7 @@ interface ReadOnlyNormalizedCache { * @param keys The set of [Record] keys to read. * @param cacheHeaders The cache headers associated with the request which generated this record. */ - fun loadRecords(keys: Collection, cacheHeaders: CacheHeaders): Collection + fun loadRecords(keys: Collection, cacheHeaders: CacheHeaders): Collection - fun dump(): Map<@JvmSuppressWildcards KClass<*>, Map> + fun dump(): Map<@JvmSuppressWildcards KClass<*>, Map> } diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/Record.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/Record.kt index 09627dae..4618e354 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/Record.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/Record.kt @@ -10,7 +10,7 @@ import com.benasher44.uuid.Uuid * a field is a GraphQL Object a [CacheKey] will be stored instead. */ class Record( - val key: String, + val key: CacheKey, val fields: Map, val mutationId: Uuid? = null, @@ -39,7 +39,7 @@ class Record( * A field key incorporates any GraphQL arguments in addition to the field name. */ fun fieldKeys(): Set { - return fields.keys.map { "$key.$it" }.toSet() + return fields.keys.map { key.fieldKey(it) }.toSet() } /** @@ -71,7 +71,7 @@ class Record( record1.fields[it] != record2.fields[it] } - return changed.map { "${record1.key}.$it" }.toSet() + return changed.map { record1.key.fieldKey(it) }.toSet() } } } @@ -120,3 +120,7 @@ fun Collection?.dependentKeys(): Set { it.fieldKeys() }?.toSet() ?: emptySet() } + +internal fun CacheKey.fieldKey(fieldName: String): String { + return "$key.$fieldName" +} diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/RecordMerger.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/RecordMerger.kt index c4bd633b..6808b32c 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/RecordMerger.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/RecordMerger.kt @@ -41,7 +41,7 @@ object DefaultRecordMerger : RecordMerger { val existingFieldValue = existing.fields[fieldKey] if (!hasExistingFieldValue || existingFieldValue != incomingFieldValue) { mergedFields[fieldKey] = incomingFieldValue - changedKeys.add("${existing.key}.$fieldKey") + changedKeys.add(existing.key.fieldKey(fieldKey)) } } @@ -107,7 +107,7 @@ class FieldRecordMerger(private val fieldMerger: FieldMerger) : RecordMerger { if (!hasExistingFieldValue) { mergedFields[fieldKey] = incomingFieldValue mergedMetadata[fieldKey] = incoming.metadata[fieldKey].orEmpty() - changedKeys.add("${existing.key}.$fieldKey") + changedKeys.add(existing.key.fieldKey(fieldKey)) } else if (existingFieldValue != incomingFieldValue) { val existingFieldInfo = FieldInfo( value = existingFieldValue, @@ -122,7 +122,7 @@ class FieldRecordMerger(private val fieldMerger: FieldMerger) : RecordMerger { mergedFields[fieldKey] = mergeResult.value mergedMetadata[fieldKey] = mergeResult.metadata - changedKeys.add("${existing.key}.$fieldKey") + changedKeys.add(existing.key.fieldKey(fieldKey)) } } diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/CacheBatchReader.kt 
b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/CacheBatchReader.kt index 973b3add..323102da 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/CacheBatchReader.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/CacheBatchReader.kt @@ -24,7 +24,7 @@ import kotlin.jvm.JvmSuppressWildcards */ internal class CacheBatchReader( private val cache: ReadOnlyNormalizedCache, - private val rootKey: String, + private val rootKey: CacheKey, private val variables: Executable.Variables, private val cacheResolver: CacheResolver, private val cacheHeaders: CacheHeaders, @@ -38,7 +38,7 @@ internal class CacheBatchReader( * @param path: the path where this pending reference needs to be inserted */ class PendingReference( - val key: String, + val key: CacheKey, val path: List, val fieldPath: List, val selections: List, @@ -111,16 +111,16 @@ internal class CacheBatchReader( copy.forEach { pendingReference -> var record = records[pendingReference.key] if (record == null) { - if (pendingReference.key == CacheKey.rootKey().key) { + if (pendingReference.key == CacheKey.rootKey()) { // This happens the very first time we read the cache record = Record(pendingReference.key, emptyMap()) } else { if (returnPartialResponses) { data[pendingReference.path] = - cacheMissError(CacheMissException(key = pendingReference.key, fieldName = null, stale = false), path = pendingReference.path) + cacheMissError(CacheMissException(key = pendingReference.key.key, fieldName = null, stale = false), path = pendingReference.path) return@forEach } else { - throw CacheMissException(pendingReference.key) + throw CacheMissException(pendingReference.key.key) } } } @@ -194,7 +194,7 @@ internal class CacheBatchReader( is CacheKey -> { pendingReferences.add( PendingReference( - key = hashedKey, + key = this, selections = selections, parentType = parentType, path = path, @@ -224,7 +224,7 @@ internal class CacheBatchReader( field = it, variables = variables, parent = this, - parentKey = "", + parentKey = CacheKey("", isHashed = true), parentType = parentType, cacheHeaders = cacheHeaders, fieldKeyGenerator = fieldKeyGenerator, diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/DefaultApolloStore.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/DefaultApolloStore.kt index 8d8d83e2..74cd4ab9 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/DefaultApolloStore.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/DefaultApolloStore.kt @@ -106,7 +106,7 @@ internal class DefaultApolloStore( dataWithErrors: DataWithErrors, rootKey: String, customScalarAdapters: CustomScalarAdapters, - ): Map { + ): Map { return dataWithErrors.normalized( executable = executable, rootKey = rootKey, @@ -129,7 +129,7 @@ internal class DefaultApolloStore( cacheHeaders = cacheHeaders, cacheResolver = cacheResolver, variables = variables, - rootKey = CacheKey.rootKey().key, + rootKey = CacheKey.rootKey(), rootSelections = operation.rootField().selections, rootField = operation.rootField(), fieldKeyGenerator = fieldKeyGenerator, @@ -181,7 +181,7 @@ internal class DefaultApolloStore( cacheHeaders = cacheHeaders, cacheResolver = cacheResolver, variables = variables, - rootKey = cacheKey.key, + rootKey = cacheKey, 
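A minimal sketch of the CacheKey-based read path used in the hunks around this point, with the in-memory cache as a stand-in implementation; the "User"/"42" typename/id pair is hypothetical:

import com.apollographql.cache.normalized.api.CacheHeaders
import com.apollographql.cache.normalized.api.CacheKey
import com.apollographql.cache.normalized.api.Record
import com.apollographql.cache.normalized.memory.MemoryCache

fun readSomeRecords(cache: MemoryCache): List<Record> {
  // The root record is addressed by CacheKey.rootKey(), no longer by its raw string key.
  val root: Record? = cache.loadRecord(CacheKey.rootKey(), CacheHeaders.NONE)
  // Batch reads also take CacheKey values; "User"/"42" stand in for a real typename/id.
  val users = cache.loadRecords(listOf(CacheKey("User", "42")), CacheHeaders.NONE)
  return listOfNotNull(root) + users
}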
rootSelections = fragment.rootField().selections, rootField = fragment.rootField(), fieldKeyGenerator = fieldKeyGenerator, @@ -298,7 +298,7 @@ internal class DefaultApolloStore( return cache.merge(record, cacheHeaders, recordMerger) } - override fun dump(): Map, Map> { + override fun dump(): Map, Map> { return cache.dump() } diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/Normalizer.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/Normalizer.kt index d21f9f30..f446cd5b 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/Normalizer.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/Normalizer.kt @@ -41,13 +41,13 @@ internal class Normalizer( private val fieldKeyGenerator: FieldKeyGenerator, private val embeddedFieldsProvider: EmbeddedFieldsProvider, ) { - private val records = mutableMapOf() + private val records = mutableMapOf() fun normalize( map: DataWithErrors, selections: List, parentType: CompiledNamedType, - ): Map { + ): Map { buildRecord(map, rootKey, selections, parentType) return records @@ -136,14 +136,15 @@ internal class Normalizer( val fields = buildFields(obj, key, selections, parentType) val fieldValues = fields.mapValues { it.value.fieldValue } val metadata = fields.mapValues { it.value.metadata }.filterValues { it.isNotEmpty() } + val cacheKey = CacheKey(key, isHashed = true) val record = Record( - key = key, + key = cacheKey, fields = fieldValues, mutationId = null, metadata = metadata, ) - val existingRecord = records[key] + val existingRecord = records[cacheKey] val mergedRecord = if (existingRecord != null) { /** @@ -153,9 +154,9 @@ internal class Normalizer( } else { record } - records[key] = mergedRecord + records[cacheKey] = mergedRecord - return CacheKey(key, isHashed = true) + return cacheKey } @@ -209,9 +210,8 @@ internal class Normalizer( )?.key if (key == null) { - key = path + key = path.hashed() } - key = key.hashed() if (embeddedFields.contains(field.name)) { buildFields(value, key, field.selections, field.type.rawType()) .mapValues { it.value.fieldValue } @@ -273,7 +273,7 @@ fun D.normalized( metadataGenerator: MetadataGenerator = EmptyMetadataGenerator, fieldKeyGenerator: FieldKeyGenerator = DefaultFieldKeyGenerator, embeddedFieldsProvider: EmbeddedFieldsProvider = DefaultEmbeddedFieldsProvider, -): Map { +): Map { val dataWithErrors = this.withErrors(executable, null, customScalarAdapters) return dataWithErrors.normalized(executable, rootKey, customScalarAdapters, cacheKeyGenerator, metadataGenerator, fieldKeyGenerator, embeddedFieldsProvider) } @@ -289,7 +289,7 @@ fun DataWithErrors.normalized( metadataGenerator: MetadataGenerator = EmptyMetadataGenerator, fieldKeyGenerator: FieldKeyGenerator = DefaultFieldKeyGenerator, embeddedFieldsProvider: EmbeddedFieldsProvider = DefaultEmbeddedFieldsProvider, -): Map { +): Map { val variables = executable.variables(customScalarAdapters, withDefaultValues = true) return Normalizer(variables, rootKey, cacheKeyGenerator, metadataGenerator, fieldKeyGenerator, embeddedFieldsProvider) .normalize(this, executable.rootField().selections, executable.rootField().type.rawType()) diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/OptimisticNormalizedCache.kt 
b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/OptimisticNormalizedCache.kt index f7cd26f4..628f65cf 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/OptimisticNormalizedCache.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/OptimisticNormalizedCache.kt @@ -11,14 +11,14 @@ import kotlin.math.max import kotlin.reflect.KClass internal class OptimisticNormalizedCache(private val wrapped: NormalizedCache) : NormalizedCache { - private val recordJournals = ConcurrentMap() + private val recordJournals = ConcurrentMap() - override fun loadRecord(key: String, cacheHeaders: CacheHeaders): Record? { + override fun loadRecord(key: CacheKey, cacheHeaders: CacheHeaders): Record? { val nonOptimisticRecord = wrapped.loadRecord(key, cacheHeaders) return nonOptimisticRecord.mergeJournalRecord(key) } - override fun loadRecords(keys: Collection, cacheHeaders: CacheHeaders): Collection { + override fun loadRecords(keys: Collection, cacheHeaders: CacheHeaders): Collection { val nonOptimisticRecords = wrapped.loadRecords(keys, cacheHeaders).associateBy { it.key } return keys.mapNotNull { key -> nonOptimisticRecords[key].mergeJournalRecord(key) @@ -50,12 +50,12 @@ internal class OptimisticNormalizedCache(private val wrapped: NormalizedCache) : var total = 0 val referencedCacheKeys = mutableSetOf() for (cacheKey in cacheKeys) { - val removedRecordJournal = recordJournals.remove(cacheKey.key) + val removedRecordJournal = recordJournals.remove(cacheKey) if (removedRecordJournal != null) { total++ if (cascade) { for (cacheReference in removedRecordJournal.current.referencedFields()) { - referencedCacheKeys += CacheKey(cacheReference.key) + referencedCacheKeys += cacheReference } } } @@ -66,21 +66,6 @@ internal class OptimisticNormalizedCache(private val wrapped: NormalizedCache) : return total } - override fun remove(pattern: String): Int { - var removed = wrapped.remove(pattern) - - val regex = patternToRegex(pattern) - val keys = HashSet(recordJournals.keys) // local copy to avoid concurrent modification - keys.forEach { key -> - if (regex.matches(key)) { - recordJournals.remove(key) - removed++ - } - } - - return removed - } - fun addOptimisticUpdates(recordSet: Collection): Set { return recordSet.flatMap { addOptimisticUpdate(it) @@ -111,11 +96,11 @@ internal class OptimisticNormalizedCache(private val wrapped: NormalizedCache) : return changedCacheKeys } - override fun dump(): Map, Map> { + override fun dump(): Map, Map> { return mapOf(this::class to recordJournals.mapValues { (_, journal) -> journal.current }) + wrapped.dump() } - private fun Record?.mergeJournalRecord(key: String): Record? { + private fun Record?.mergeJournalRecord(key: CacheKey): Record? 
{ val journal = recordJournals[key] return if (journal != null) { this?.mergeWith(journal.current)?.first ?: journal.current diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/RecordWeigher.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/RecordWeigher.kt index e5b9b6e6..ff780c4f 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/RecordWeigher.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/RecordWeigher.kt @@ -27,7 +27,7 @@ internal object RecordWeigher { @JvmStatic fun calculateBytes(record: Record): Int { - var size = SIZE_OF_RECORD_OVERHEAD + record.key.length + var size = SIZE_OF_RECORD_OVERHEAD + record.key.key.length for ((key, value) in record.fields) { size += key.length + weighField(value) } diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/memory/MemoryCache.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/memory/MemoryCache.kt index bf5fd586..82ba7a2a 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/memory/MemoryCache.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/memory/MemoryCache.kt @@ -10,7 +10,6 @@ import com.apollographql.cache.normalized.api.RecordMerger import com.apollographql.cache.normalized.api.RecordMergerContext import com.apollographql.cache.normalized.api.withDates import com.apollographql.cache.normalized.internal.Lock -import com.apollographql.cache.normalized.internal.patternToRegex import com.apollographql.cache.normalized.memory.internal.LruCache import kotlin.jvm.JvmOverloads import kotlin.reflect.KClass @@ -40,22 +39,22 @@ class MemoryCache( return lock?.read { block() } ?: block() } - private val lruCache = LruCache(maxSize = maxSizeBytes, expireAfterMillis = expireAfterMillis) { key, record -> - key.length + record.sizeInBytes + private val lruCache = LruCache(maxSize = maxSizeBytes, expireAfterMillis = expireAfterMillis) { key, record -> + key.key.length + record.sizeInBytes } val size: Int get() = lockRead { lruCache.weight() } - override fun loadRecord(key: String, cacheHeaders: CacheHeaders): Record? = lockRead { + override fun loadRecord(key: CacheKey, cacheHeaders: CacheHeaders): Record? = lockRead { val record = internalLoadRecord(key, cacheHeaders) record ?: nextCache?.loadRecord(key, cacheHeaders)?.also { nextCachedRecord -> lruCache[key] = nextCachedRecord } } - override fun loadRecords(keys: Collection, cacheHeaders: CacheHeaders): Collection = lockRead { - val recordsByKey: Map = keys.associateWith { key -> internalLoadRecord(key, cacheHeaders) } + override fun loadRecords(keys: Collection, cacheHeaders: CacheHeaders): Collection = lockRead { + val recordsByKey: Map = keys.associateWith { key -> internalLoadRecord(key, cacheHeaders) } val missingKeys = recordsByKey.filterValues { it == null }.keys val nextCachedRecords = nextCache?.loadRecords(missingKeys, cacheHeaders).orEmpty() for (record in nextCachedRecords) { @@ -64,7 +63,7 @@ class MemoryCache( recordsByKey.values.filterNotNull() + nextCachedRecords } - private fun internalLoadRecord(key: String, cacheHeaders: CacheHeaders): Record? { + private fun internalLoadRecord(key: CacheKey, cacheHeaders: CacheHeaders): Record? 
{ return lruCache[key]?.also { if (cacheHeaders.hasHeader(ApolloCacheHeaders.EVICT_AFTER_READ)) { lruCache.remove(key) @@ -95,9 +94,9 @@ class MemoryCache( var total = 0 val referencedCacheKeys = mutableSetOf() for (cacheKey in cacheKeys) { - val removedRecord = lruCache.remove(cacheKey.key) + val removedRecord = lruCache.remove(cacheKey) if (cascade && removedRecord != null) { - referencedCacheKeys += removedRecord.referencedFields().map { CacheKey(it.key) } + referencedCacheKeys += removedRecord.referencedFields() } if (removedRecord != null) { total++ @@ -109,23 +108,6 @@ class MemoryCache( return total } - override fun remove(pattern: String): Int { - val regex = patternToRegex(pattern) - return lockWrite { - var total = 0 - val keys = HashSet(lruCache.asMap().keys) // local copy to avoid concurrent modification - keys.forEach { - if (regex.matches(it)) { - lruCache.remove(it) - total++ - } - } - - val chainRemoved = nextCache?.remove(pattern) ?: 0 - total + chainRemoved - } - } - override fun merge(record: Record, cacheHeaders: CacheHeaders, recordMerger: RecordMerger): Set { if (cacheHeaders.hasHeader(ApolloCacheHeaders.DO_NOT_STORE)) { return emptySet() @@ -161,7 +143,7 @@ class MemoryCache( return changedKeys } - override fun dump(): Map, Map> { + override fun dump(): Map, Map> { return lockRead { mapOf(this::class to lruCache.asMap().mapValues { (_, record) -> record }) + nextCache?.dump().orEmpty() diff --git a/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/CacheKeyResolverTest.kt b/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/CacheKeyResolverTest.kt index e82edcd7..c180b304 100644 --- a/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/CacheKeyResolverTest.kt +++ b/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/CacheKeyResolverTest.kt @@ -41,7 +41,7 @@ class CacheKeyResolverTest { field, Executable.Variables(emptyMap()), emptyMap(), - "", + CacheKey("", isHashed = true), "", CacheHeaders(emptyMap()), DefaultFieldKeyGenerator, diff --git a/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/DefaultRecordMergerTest.kt b/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/DefaultRecordMergerTest.kt index d1ca6c3d..f303cd1a 100644 --- a/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/DefaultRecordMergerTest.kt +++ b/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/DefaultRecordMergerTest.kt @@ -1,6 +1,7 @@ package com.apollographql.cache.normalized import com.apollographql.cache.normalized.api.CacheHeaders +import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.DefaultRecordMerger import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.api.RecordMergerContext @@ -11,7 +12,7 @@ class DefaultRecordMergerTest { @Test fun mergeMetaData() { val existing = Record( - key = "key", + key = CacheKey("key"), fields = mapOf( "field1" to "value1", "field2" to "value2" @@ -30,7 +31,7 @@ class DefaultRecordMergerTest { ) val incoming = Record( - key = "key", + key = CacheKey("key"), fields = mapOf( "field1" to "value1.incoming", "field3" to "value3", @@ -51,7 +52,7 @@ class DefaultRecordMergerTest { val mergedRecord = DefaultRecordMerger.merge(RecordMergerContext(existing, incoming, CacheHeaders.NONE)).first val expected = 
Record( - key = "key", + key = CacheKey("key"), fields = mapOf( "field1" to "value1.incoming", "field2" to "value2", diff --git a/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/MemoryCacheTest.kt b/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/MemoryCacheTest.kt index d606fd1f..17a2048e 100644 --- a/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/MemoryCacheTest.kt +++ b/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/MemoryCacheTest.kt @@ -4,9 +4,9 @@ import com.apollographql.cache.normalized.api.ApolloCacheHeaders import com.apollographql.cache.normalized.api.CacheHeaders import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.DefaultRecordMerger -import com.apollographql.cache.normalized.memory.MemoryCache import com.apollographql.cache.normalized.api.NormalizedCache import com.apollographql.cache.normalized.api.Record +import com.apollographql.cache.normalized.memory.MemoryCache import kotlin.test.Test import kotlin.test.assertEquals import kotlin.test.assertNotNull @@ -46,14 +46,14 @@ class MemoryCacheTest { val records = listOf(testRecord1, testRecord2, testRecord3) lruCache.merge(records, CacheHeaders.NONE, DefaultRecordMerger) - val readRecords = lruCache.loadRecords(listOf("key1", "key2", "key3"), CacheHeaders.NONE) + val readRecords = lruCache.loadRecords(listOf(CacheKey("key1"), CacheKey("key2"), CacheKey("key3")), CacheHeaders.NONE) assertTrue(readRecords.containsAll(records)) } @Test fun testLoad_recordNotPresent() { val lruCache = createCache() - val record = lruCache.loadRecord("key1", CacheHeaders.NONE) + val record = lruCache.loadRecord(CacheKey("key1"), CacheHeaders.NONE) assertNull(record) } @@ -65,7 +65,7 @@ class MemoryCacheTest { val lruCache = createCache( // all records won't fit as there is timestamp that stored with each record - maxSizeBytes = 600 + maxSizeBytes = 342 ) val records = listOf(testRecord1, testRecord2, testRecord3) @@ -151,7 +151,7 @@ class MemoryCacheTest { fun testDualCache_recordNotPresent() { val secondaryCache = createCache() val primaryCache = createCache(nextCache = secondaryCache) - assertNull(primaryCache.loadRecord("key", CacheHeaders.NONE)) + assertNull(primaryCache.loadRecord(CacheKey("key"), CacheHeaders.NONE)) } @@ -238,7 +238,7 @@ class MemoryCacheTest { val lruCache = createCache() val record1 = Record( - key = "id_1", + key = CacheKey("id_1"), fields = mapOf( "a" to "stringValueA", "b" to "stringValueB" @@ -246,7 +246,7 @@ class MemoryCacheTest { ) val record2 = Record( - key = "id_2", + key = CacheKey("id_2"), fields = mapOf( "a" to CacheKey("id_1"), ) @@ -255,7 +255,7 @@ class MemoryCacheTest { val records = listOf(record1, record2) lruCache.merge(records, CacheHeaders.NONE, DefaultRecordMerger) - assertTrue(lruCache.remove(CacheKey(record2.key), cascade = false)) + assertTrue(lruCache.remove(record2.key, cascade = false)) assertNotNull(lruCache.loadRecord(record1.key, CacheHeaders.NONE)) } @@ -264,7 +264,7 @@ class MemoryCacheTest { val lruCache = createCache() val record1 = Record( - key = "id_1", + key = CacheKey("id_1"), fields = mapOf( "a" to "stringValueA", "b" to "stringValueB" @@ -272,7 +272,7 @@ class MemoryCacheTest { ) val record2 = Record( - key = "id_2", + key = CacheKey("id_2"), fields = mapOf( "a" to CacheKey("id_1"), ) @@ -281,7 +281,7 @@ class MemoryCacheTest { val records = listOf(record1, record2) 
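A minimal sketch of constructing a Record with the new CacheKey-typed key, mirroring the updated tests; the field names and values are hypothetical, and fieldKeys() now derives its entries from the hashed key:

import com.apollographql.cache.normalized.api.CacheKey
import com.apollographql.cache.normalized.api.Record

fun buildSampleRecord(): Record {
  val record = Record(
      key = CacheKey("Product", "1"),                    // the key is hashed when the CacheKey is built
      fields = mapOf("name" to "Apollo", "price" to 42.0),
  )
  // Each entry has the form "<hashed key>.<field name>", as used for change notifications.
  val dependentKeys: Set<String> = record.fieldKeys()
  check(dependentKeys.size == 2)
  return record
}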
lruCache.merge(records, CacheHeaders.NONE, DefaultRecordMerger) - assertTrue(lruCache.remove(CacheKey(record2.key), cascade = true)) + assertTrue(lruCache.remove(record2.key, cascade = true)) assertNull(lruCache.loadRecord(record1.key, CacheHeaders.NONE)) } @@ -301,7 +301,7 @@ class MemoryCacheTest { private fun createTestRecord(id: String): Record { return Record( - key = "key$id", + key = CacheKey("key$id"), fields = mapOf( "field1" to "stringValueA$id", "field2" to "stringValueB$id" diff --git a/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/RecordWeigherTest.kt b/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/RecordWeigherTest.kt index bbb949ec..51cac0df 100644 --- a/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/RecordWeigherTest.kt +++ b/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/RecordWeigherTest.kt @@ -19,7 +19,7 @@ class RecordWeigherTest { val expectedCacheKeyList = listOf(CacheKey("bar"), CacheKey("baz")) val expectedScalarList = listOf("scalarOne", "scalarTwo") val record = Record( - key = "root", + key = CacheKey("root"), fields = mapOf( "double" to expectedDouble, "string" to expectedStringValue, diff --git a/normalized-cache-sqlite-incubating/api/android/normalized-cache-sqlite-incubating.api b/normalized-cache-sqlite-incubating/api/android/normalized-cache-sqlite-incubating.api index 131ffa74..841d7f63 100644 --- a/normalized-cache-sqlite-incubating/api/android/normalized-cache-sqlite-incubating.api +++ b/normalized-cache-sqlite-incubating/api/android/normalized-cache-sqlite-incubating.api @@ -12,13 +12,12 @@ public final class com/apollographql/cache/normalized/sql/ApolloInitializer$Comp public final class com/apollographql/cache/normalized/sql/SqlNormalizedCache : com/apollographql/cache/normalized/api/NormalizedCache { public fun clearAll ()V public fun dump ()Ljava/util/Map; - public fun loadRecord (Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; + public fun loadRecord-eNSUWrY (Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; public fun loadRecords (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Collection; public fun merge (Lcom/apollographql/cache/normalized/api/Record;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; public fun merge (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; - public fun remove (Lcom/apollographql/cache/normalized/api/CacheKey;Z)Z - public fun remove (Ljava/lang/String;)I public fun remove (Ljava/util/Collection;Z)I + public fun remove-eNSUWrY (Ljava/lang/String;Z)Z } public final class com/apollographql/cache/normalized/sql/SqlNormalizedCacheFactory_androidKt { diff --git a/normalized-cache-sqlite-incubating/api/jvm/normalized-cache-sqlite-incubating.api b/normalized-cache-sqlite-incubating/api/jvm/normalized-cache-sqlite-incubating.api index 5ec64ca0..f5ee66b2 100644 --- a/normalized-cache-sqlite-incubating/api/jvm/normalized-cache-sqlite-incubating.api +++ b/normalized-cache-sqlite-incubating/api/jvm/normalized-cache-sqlite-incubating.api @@ -1,13 +1,12 @@ public final class com/apollographql/cache/normalized/sql/SqlNormalizedCache : 
com/apollographql/cache/normalized/api/NormalizedCache { public fun clearAll ()V public fun dump ()Ljava/util/Map; - public fun loadRecord (Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; + public fun loadRecord-eNSUWrY (Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; public fun loadRecords (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Collection; public fun merge (Lcom/apollographql/cache/normalized/api/Record;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; public fun merge (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; - public fun remove (Lcom/apollographql/cache/normalized/api/CacheKey;Z)Z - public fun remove (Ljava/lang/String;)I public fun remove (Ljava/util/Collection;Z)I + public fun remove-eNSUWrY (Ljava/lang/String;Z)Z } public final class com/apollographql/cache/normalized/sql/SqlNormalizedCacheFactory_jvmKt { diff --git a/normalized-cache-sqlite-incubating/api/normalized-cache-sqlite-incubating.klib.api b/normalized-cache-sqlite-incubating/api/normalized-cache-sqlite-incubating.klib.api index 25be1d66..1a1dd189 100644 --- a/normalized-cache-sqlite-incubating/api/normalized-cache-sqlite-incubating.klib.api +++ b/normalized-cache-sqlite-incubating/api/normalized-cache-sqlite-incubating.klib.api @@ -137,14 +137,13 @@ final class com.apollographql.cache.normalized.sql.internal.blob2/RecordsForKeys final class com.apollographql.cache.normalized.sql/SqlNormalizedCache : com.apollographql.cache.normalized.api/NormalizedCache { // com.apollographql.cache.normalized.sql/SqlNormalizedCache|null[0] final fun clearAll() // com.apollographql.cache.normalized.sql/SqlNormalizedCache.clearAll|clearAll(){}[0] - final fun dump(): kotlin.collections/Map, kotlin.collections/Map> // com.apollographql.cache.normalized.sql/SqlNormalizedCache.dump|dump(){}[0] - final fun loadRecord(kotlin/String, com.apollographql.cache.normalized.api/CacheHeaders): com.apollographql.cache.normalized.api/Record? // com.apollographql.cache.normalized.sql/SqlNormalizedCache.loadRecord|loadRecord(kotlin.String;com.apollographql.cache.normalized.api.CacheHeaders){}[0] - final fun loadRecords(kotlin.collections/Collection, com.apollographql.cache.normalized.api/CacheHeaders): kotlin.collections/Collection // com.apollographql.cache.normalized.sql/SqlNormalizedCache.loadRecords|loadRecords(kotlin.collections.Collection;com.apollographql.cache.normalized.api.CacheHeaders){}[0] + final fun dump(): kotlin.collections/Map, kotlin.collections/Map> // com.apollographql.cache.normalized.sql/SqlNormalizedCache.dump|dump(){}[0] + final fun loadRecord(com.apollographql.cache.normalized.api/CacheKey, com.apollographql.cache.normalized.api/CacheHeaders): com.apollographql.cache.normalized.api/Record? 
// com.apollographql.cache.normalized.sql/SqlNormalizedCache.loadRecord|loadRecord(com.apollographql.cache.normalized.api.CacheKey;com.apollographql.cache.normalized.api.CacheHeaders){}[0]
+    final fun loadRecords(kotlin.collections/Collection, com.apollographql.cache.normalized.api/CacheHeaders): kotlin.collections/Collection // com.apollographql.cache.normalized.sql/SqlNormalizedCache.loadRecords|loadRecords(kotlin.collections.Collection;com.apollographql.cache.normalized.api.CacheHeaders){}[0]
     final fun merge(com.apollographql.cache.normalized.api/Record, com.apollographql.cache.normalized.api/CacheHeaders, com.apollographql.cache.normalized.api/RecordMerger): kotlin.collections/Set // com.apollographql.cache.normalized.sql/SqlNormalizedCache.merge|merge(com.apollographql.cache.normalized.api.Record;com.apollographql.cache.normalized.api.CacheHeaders;com.apollographql.cache.normalized.api.RecordMerger){}[0]
     final fun merge(kotlin.collections/Collection, com.apollographql.cache.normalized.api/CacheHeaders, com.apollographql.cache.normalized.api/RecordMerger): kotlin.collections/Set // com.apollographql.cache.normalized.sql/SqlNormalizedCache.merge|merge(kotlin.collections.Collection;com.apollographql.cache.normalized.api.CacheHeaders;com.apollographql.cache.normalized.api.RecordMerger){}[0]
     final fun remove(com.apollographql.cache.normalized.api/CacheKey, kotlin/Boolean): kotlin/Boolean // com.apollographql.cache.normalized.sql/SqlNormalizedCache.remove|remove(com.apollographql.cache.normalized.api.CacheKey;kotlin.Boolean){}[0]
     final fun remove(kotlin.collections/Collection, kotlin/Boolean): kotlin/Int // com.apollographql.cache.normalized.sql/SqlNormalizedCache.remove|remove(kotlin.collections.Collection;kotlin.Boolean){}[0]
-    final fun remove(kotlin/String): kotlin/Int // com.apollographql.cache.normalized.sql/SqlNormalizedCache.remove|remove(kotlin.String){}[0]
 }
 
 final const val com.apollographql.cache.normalized.sql/VERSION // com.apollographql.cache.normalized.sql/VERSION|{}VERSION[0]
 
diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCache.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCache.kt
index 2420e2ef..a7b24712 100644
--- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCache.kt
+++ b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCache.kt
@@ -27,34 +27,34 @@ class SqlNormalizedCache internal constructor(
     }
   }
 
-  override fun loadRecord(key: String, cacheHeaders: CacheHeaders): Record? {
+  override fun loadRecord(key: CacheKey, cacheHeaders: CacheHeaders): Record? {
     if (cacheHeaders.hasHeader(ApolloCacheHeaders.MEMORY_CACHE_ONLY)) {
       return null
     }
     val evictAfterRead = cacheHeaders.hasHeader(EVICT_AFTER_READ)
     return maybeTransaction(evictAfterRead) {
       try {
-        recordDatabase.select(key)
+        recordDatabase.select(key.key)
       } catch (e: Exception) {
         // Unable to read the record from the database, it is possibly corrupted - treat this as a cache miss
         apolloExceptionHandler(Exception("Unable to read a record from the database", e))
         null
       }?.also {
         if (evictAfterRead) {
-          recordDatabase.delete(key)
+          recordDatabase.delete(key.key)
         }
       }
     }
   }
 
-  override fun loadRecords(keys: Collection, cacheHeaders: CacheHeaders): Collection {
+  override fun loadRecords(keys: Collection, cacheHeaders: CacheHeaders): Collection {
     if (cacheHeaders.hasHeader(ApolloCacheHeaders.MEMORY_CACHE_ONLY)) {
       return emptyList()
     }
     val evictAfterRead = cacheHeaders.hasHeader(EVICT_AFTER_READ)
     return maybeTransaction(evictAfterRead) {
       try {
-        internalGetRecords(keys)
+        internalGetRecords(keys.map { it.key })
       } catch (e: Exception) {
         // Unable to read the records from the database, it is possibly corrupted - treat this as a cache miss
         apolloExceptionHandler(Exception("Unable to read records from the database", e))
@@ -62,7 +62,7 @@ class SqlNormalizedCache internal constructor(
     }.also {
       if (evictAfterRead) {
         it.forEach { record ->
-          recordDatabase.delete(record.key)
+          recordDatabase.delete(record.key.key)
         }
       }
     }
@@ -85,13 +85,6 @@ class SqlNormalizedCache internal constructor(
     }
   }
 
-  override fun remove(pattern: String): Int {
-    return recordDatabase.transaction {
-      recordDatabase.deleteMatching(pattern)
-      recordDatabase.changes().toInt()
-    }
-  }
-
   override fun merge(record: Record, cacheHeaders: CacheHeaders, recordMerger: RecordMerger): Set {
     if (cacheHeaders.hasHeader(ApolloCacheHeaders.DO_NOT_STORE) || cacheHeaders.hasHeader(ApolloCacheHeaders.MEMORY_CACHE_ONLY)) {
       return emptySet()
@@ -118,7 +111,7 @@ class SqlNormalizedCache internal constructor(
     }
   }
 
-  override fun dump(): Map, Map> {
+  override fun dump(): Map, Map> {
    return mapOf(this::class to recordDatabase.selectAll().associateBy { it.key })
   }
 
@@ -155,7 +148,7 @@ class SqlNormalizedCache internal constructor(
     val expirationDate = cacheHeaders.headerValue(ApolloCacheHeaders.EXPIRATION_DATE)
     recordDatabase.transaction {
       val oldRecords = internalGetRecords(
-          keys = records.map { it.key },
+          keys = records.map { it.key.key },
       ).associateBy { it.key }
 
       updatedRecordKeys = records.flatMap { record ->
@@ -182,7 +175,7 @@ class SqlNormalizedCache internal constructor(
     val receivedDate = cacheHeaders.headerValue(ApolloCacheHeaders.RECEIVED_DATE)
     val expirationDate = cacheHeaders.headerValue(ApolloCacheHeaders.EXPIRATION_DATE)
     return recordDatabase.transaction {
-      val oldRecord = recordDatabase.select(record.key)
+      val oldRecord = recordDatabase.select(record.key.key)
       if (oldRecord == null) {
         recordDatabase.insert(record.withDates(receivedDate = receivedDate, expirationDate = expirationDate))
         record.fieldKeys()
diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/Blob2RecordDatabase.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/Blob2RecordDatabase.kt
index 97b2bfad..045e4233 100644
--- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/Blob2RecordDatabase.kt
+++ b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/Blob2RecordDatabase.kt
@@ -47,11 +47,11 @@ internal class Blob2RecordDatabase(private val blobQueries: Blob2Queries) : Reco
   }
 
   override fun insert(record: Record) {
-    blobQueries.insert(record.key, BlobRecordSerializer.serialize(record), record.receivedDate())
+    blobQueries.insert(record.key.key, BlobRecordSerializer.serialize(record), record.receivedDate())
   }
 
   override fun update(record: Record) {
-    blobQueries.update(BlobRecordSerializer.serialize(record), record.receivedDate(), record.key)
+    blobQueries.update(BlobRecordSerializer.serialize(record), record.receivedDate(), record.key.key)
   }
 
   override fun selectAll(): List {
diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/BlobRecordDatabase.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/BlobRecordDatabase.kt
index 99c2b28e..cde13190 100644
--- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/BlobRecordDatabase.kt
+++ b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/BlobRecordDatabase.kt
@@ -46,11 +46,11 @@ internal class BlobRecordDatabase(private val blobQueries: BlobQueries) : Record
   }
 
   override fun insert(record: Record) {
-    blobQueries.insert(record.key, BlobRecordSerializer.serialize(record))
+    blobQueries.insert(record.key.key, BlobRecordSerializer.serialize(record))
   }
 
   override fun update(record: Record) {
-    blobQueries.update(BlobRecordSerializer.serialize(record), record.key)
+    blobQueries.update(BlobRecordSerializer.serialize(record), record.key.key)
   }
 
   override fun selectAll(): List {
diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/BlobRecordSerializer.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/BlobRecordSerializer.kt
index b8f78e11..1485c437 100644
--- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/BlobRecordSerializer.kt
+++ b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/BlobRecordSerializer.kt
@@ -64,7 +64,7 @@ internal object BlobRecordSerializer {
       fields[name] = buffer.readAny()
     }
 
-    return Record(key, fields, null, metadata)
+    return Record(CacheKey(key, isHashed = true), fields, null, metadata)
   }
 
   private fun Buffer.writeString(value: String) {
diff --git a/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCacheTest.kt b/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCacheTest.kt
index e37380ba..569670f4 100644
--- a/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCacheTest.kt
+++ b/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCacheTest.kt
@@ -16,7 +16,6 @@ import com.apollographql.cache.normalized.api.Record
 import com.apollographql.cache.normalized.sql.internal.BlobRecordDatabase
 import com.apollographql.cache.normalized.sql.internal.blob.BlobQueries
 import kotlin.test.BeforeTest
-import kotlin.test.Ignore
 import kotlin.test.Test
 import kotlin.test.assertEquals
 import
kotlin.test.assertNotNull @@ -109,7 +108,7 @@ class SqlNormalizedCacheTest { cacheHeaders = CacheHeaders.NONE, recordMerger = DefaultRecordMerger, ) - cache.remove(cacheKey = CacheKey(STANDARD_KEY), cascade = false) + cache.remove(cacheKey = STANDARD_KEY, cascade = false) val record = cache.loadRecord(STANDARD_KEY, CacheHeaders.NONE) assertNull(record) } @@ -181,7 +180,7 @@ class SqlNormalizedCacheTest { ) val record = cache.loadRecord(STANDARD_KEY, CacheHeaders.NONE) assertNotNull(record) - assertEquals(expected = setOf("$STANDARD_KEY.fieldKey", "$STANDARD_KEY.newFieldKey"), actual = changedKeys) + assertEquals(expected = setOf("${STANDARD_KEY.key}.fieldKey", "${STANDARD_KEY.key}.newFieldKey"), actual = changedKeys) assertEquals(expected = "valueUpdated", actual = record.fields["fieldKey"]) assertEquals(expected = true, actual = record.fields["newFieldKey"]) } @@ -206,28 +205,6 @@ class SqlNormalizedCacheTest { assertEquals(expected = true, actual = record.fields["newFieldKey"]) } - @Ignore - @Test - fun testPatternRemove() { - createRecord("specialKey1") - createRecord("specialKey2") - createRecord("regularKey1") - - cache.remove("specialKey%") - assertNull(cache.loadRecord("specialKey1", CacheHeaders.NONE)) - assertNull(cache.loadRecord("specialKey1", CacheHeaders.NONE)) - assertNotNull(cache.loadRecord("regularKey1", CacheHeaders.NONE)) - } - - @Ignore - @Test - fun testPatternRemoveWithEscape() { - createRecord("%1") - - cache.remove("\\%%") - assertNull(cache.loadRecord("%1", CacheHeaders.NONE)) - } - @Test fun exceptionCallsExceptionHandler() { val badCache = SqlNormalizedCache(BlobRecordDatabase(BlobQueries(BadDriver))) @@ -261,7 +238,7 @@ class SqlNormalizedCacheTest { // Creating a self-referencing record cache.merge( record = Record( - key = "selfRefKey", + key = CacheKey("selfRefKey"), fields = mapOf( "field1" to "value1", "selfRef" to CacheKey("selfRefKey"), @@ -274,7 +251,7 @@ class SqlNormalizedCacheTest { val result = cache.remove(cacheKey = CacheKey("selfRefKey"), cascade = true) assertTrue(result) - val record = cache.loadRecord("selfRefKey", CacheHeaders.NONE) + val record = cache.loadRecord(CacheKey("selfRefKey"), CacheHeaders.NONE) assertNull(record) } @@ -283,7 +260,7 @@ class SqlNormalizedCacheTest { // Creating two records that reference each other cache.merge( record = Record( - key = "key1", + key = CacheKey("key1"), fields = mapOf( "field1" to "value1", "refToKey2" to CacheKey("key2"), @@ -295,7 +272,7 @@ class SqlNormalizedCacheTest { cache.merge( record = Record( - key = "key2", + key = CacheKey("key2"), fields = mapOf( "field1" to "value2", "refToKey1" to CacheKey("key1"), @@ -308,8 +285,8 @@ class SqlNormalizedCacheTest { val result = cache.remove(cacheKey = CacheKey("key1"), cascade = true) assertTrue(result) - assertNull(cache.loadRecord("key1", CacheHeaders.NONE)) - assertNull(cache.loadRecord("key2", CacheHeaders.NONE)) + assertNull(cache.loadRecord(CacheKey("key1"), CacheHeaders.NONE)) + assertNull(cache.loadRecord(CacheKey("key2"), CacheHeaders.NONE)) } private val BadDriver = object : SqlDriver { @@ -352,7 +329,7 @@ class SqlNormalizedCacheTest { } } - private fun createRecord(key: String) { + private fun createRecord(key: CacheKey) { cache.merge( record = Record( key = key, @@ -367,7 +344,7 @@ class SqlNormalizedCacheTest { } companion object { - const val STANDARD_KEY = "key" - const val QUERY_ROOT_KEY = "QUERY_ROOT" + val STANDARD_KEY = CacheKey("key") + val QUERY_ROOT_KEY = CacheKey.rootKey() } } diff --git 
a/normalized-cache-sqlite-incubating/src/jvmTest/kotlin/com/apollographql/cache/normalized/sql/TrimTest.kt b/normalized-cache-sqlite-incubating/src/jvmTest/kotlin/com/apollographql/cache/normalized/sql/TrimTest.kt index 970f7e54..95afb135 100644 --- a/normalized-cache-sqlite-incubating/src/jvmTest/kotlin/com/apollographql/cache/normalized/sql/TrimTest.kt +++ b/normalized-cache-sqlite-incubating/src/jvmTest/kotlin/com/apollographql/cache/normalized/sql/TrimTest.kt @@ -1,6 +1,7 @@ package com.apollographql.cache.normalized.sql import com.apollographql.cache.normalized.api.CacheHeaders +import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.DefaultRecordMerger import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.api.withDates @@ -23,7 +24,7 @@ class TrimTest { val largeString = "".padStart(1024, '?') val oldRecord = Record( - key = "old", + key = CacheKey("old"), fields = mapOf("key" to "value"), mutationId = null, metadata = emptyMap() @@ -32,7 +33,7 @@ class TrimTest { val newRecords = 0.until(2 * 1024).map { Record( - key = "new$it", + key = CacheKey("new$it"), fields = mapOf("key" to largeString), mutationId = null, metadata = emptyMap() @@ -40,13 +41,13 @@ class TrimTest { } cache.merge(newRecords, CacheHeaders.NONE, recordMerger = DefaultRecordMerger) - assertEquals(9596928, dbFile.length()) + assertEquals(9646080, dbFile.length()) // Trim the cache by 10% - val trimmedCache = TrimmableNormalizedCacheFactory(dbUrl, 9596928, 0.1f).create() + val trimmedCache = TrimmableNormalizedCacheFactory(dbUrl, 9646080, 0.1f).create() - assertEquals(8548352, dbFile.length()) + assertEquals(8552448, dbFile.length()) // The oldest key must have been removed - assertNull(trimmedCache.loadRecord("old", CacheHeaders.NONE)) + assertNull(trimmedCache.loadRecord(CacheKey("old"), CacheHeaders.NONE)) } } diff --git a/tests/defer/src/commonTest/kotlin/test/DeferNormalizedCacheTest.kt b/tests/defer/src/commonTest/kotlin/test/DeferNormalizedCacheTest.kt index ddb222ce..355b6bc5 100644 --- a/tests/defer/src/commonTest/kotlin/test/DeferNormalizedCacheTest.kt +++ b/tests/defer/src/commonTest/kotlin/test/DeferNormalizedCacheTest.kt @@ -15,9 +15,9 @@ import com.apollographql.apollo.testing.internal.runTest import com.apollographql.cache.normalized.ApolloStore import com.apollographql.cache.normalized.FetchPolicy import com.apollographql.cache.normalized.api.CacheHeaders +import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.apolloStore import com.apollographql.cache.normalized.fetchPolicy -import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.optimisticUpdates import com.apollographql.cache.normalized.store @@ -453,8 +453,8 @@ class DeferNormalizedCacheTest { val cacheExceptionResponse = actual.last() assertIs(networkExceptionResponse.exception) assertIs(cacheExceptionResponse.exception) - val hashedKey = ("computers.0".hashed() + ".screen").hashed() - assertEquals("Object '$hashedKey' has no field named 'isColor'", cacheExceptionResponse.exception!!.message) + val key = CacheKey((CacheKey("computers.0").key + ".screen")).key + assertEquals("Object '$key' has no field named 'isColor'", cacheExceptionResponse.exception!!.message) } @Test @@ -541,7 +541,7 @@ class DeferNormalizedCacheTest { val multipartBody = mockServer.enqueueMultipart("application/json") 
multipartBody.enqueuePart(jsonList[0].encodeUtf8(), false) val recordFields = apolloClient.query(SimpleDeferQuery()).fetchPolicy(FetchPolicy.NetworkOnly).toFlow().map { - apolloClient.apolloStore.accessCache { it.loadRecord("computers.0".hashed(), CacheHeaders.NONE)!!.fields }.also { + apolloClient.apolloStore.accessCache { it.loadRecord(CacheKey("computers.0"), CacheHeaders.NONE)!!.fields }.also { multipartBody.enqueuePart(jsonList[1].encodeUtf8(), true) } }.toList() diff --git a/tests/garbage-collection/src/commonTest/kotlin/DanglingReferencesTest.kt b/tests/garbage-collection/src/commonTest/kotlin/DanglingReferencesTest.kt index dcbfef6d..bc31dca3 100644 --- a/tests/garbage-collection/src/commonTest/kotlin/DanglingReferencesTest.kt +++ b/tests/garbage-collection/src/commonTest/kotlin/DanglingReferencesTest.kt @@ -36,10 +36,10 @@ class DanglingReferencesTest { .execute() var allRecords = store.accessCache { it.allRecords() } - assertTrue(allRecords["Repository:0".hashed()]!!.fields.containsKey("starGazers")) + assertTrue(allRecords[CacheKey("Repository:0")]!!.fields.containsKey("starGazers")) // Remove User 1, now Repository 0.starGazers is a dangling reference - store.remove(CacheKey("User:1".hashed()), cascade = false) + store.remove(CacheKey("User:1"), cascade = false) val removedFieldsAndRecords = store.removeDanglingReferences() assertEquals( setOf("${"Repository:0".hashed()}.starGazers"), @@ -50,7 +50,7 @@ class DanglingReferencesTest { removedFieldsAndRecords.removedRecords ) allRecords = store.accessCache { it.allRecords() } - assertFalse(allRecords["Repository:0".hashed()]!!.fields.containsKey("starGazers")) + assertFalse(allRecords[CacheKey("Repository:0")]!!.fields.containsKey("starGazers")) } } @@ -75,7 +75,7 @@ class DanglingReferencesTest { // thus (metaProjects.0.0) is empty and removed // thus (QUERY_ROOT).metaProjects is a dangling reference // thus QUERY_ROOT is empty and removed - store.remove(CacheKey("User:0".hashed()), cascade = false) + store.remove(CacheKey("User:0"), cascade = false) val removedFieldsAndRecords = store.removeDanglingReferences() assertEquals( setOf( @@ -87,16 +87,16 @@ class DanglingReferencesTest { ) assertEquals( setOf( - CacheKey(("metaProjects.0.0".hashed() + ".type").hashed()), - CacheKey("metaProjects.0.0".hashed()), + CacheKey(("metaProjects.0.0".hashed() + ".type")), + CacheKey("metaProjects.0.0"), CacheKey("QUERY_ROOT"), ), removedFieldsAndRecords.removedRecords ) val allRecords = store.accessCache { it.allRecords() } - assertFalse(allRecords.containsKey("QUERY_ROOT")) - assertFalse(allRecords.containsKey("metaProjects.0.0")) - assertFalse(allRecords.containsKey("metaProjects.0.0.type")) + assertFalse(allRecords.containsKey(CacheKey("QUERY_ROOT"))) + assertFalse(allRecords.containsKey(CacheKey("metaProjects.0.0"))) + assertFalse(allRecords.containsKey(CacheKey("metaProjects.0.0".hashed() + ".type"))) } } diff --git a/tests/garbage-collection/src/commonTest/kotlin/GarbageCollectTest.kt b/tests/garbage-collection/src/commonTest/kotlin/GarbageCollectTest.kt index 1c41a60c..d458767b 100644 --- a/tests/garbage-collection/src/commonTest/kotlin/GarbageCollectTest.kt +++ b/tests/garbage-collection/src/commonTest/kotlin/GarbageCollectTest.kt @@ -60,9 +60,9 @@ class GarbageCollectTest { ) assertEquals( setOf( - CacheKey(("metaProjects.0.0".hashed() + ".type").hashed()), - CacheKey(("metaProjects.0.1".hashed() + ".type").hashed()), - CacheKey(("metaProjects.1.0".hashed() + ".type").hashed()), + CacheKey("metaProjects.0.0".hashed() + ".type"), + 
CacheKey("metaProjects.0.1".hashed() + ".type"), + CacheKey("metaProjects.1.0".hashed() + ".type"), ), garbageCollectResult.removedStaleFields.removedRecords ) @@ -78,9 +78,9 @@ class GarbageCollectTest { ) assertEquals( setOf( - CacheKey("metaProjects.0.0".hashed()), - CacheKey("metaProjects.0.1".hashed()), - CacheKey("metaProjects.1.0".hashed()), + CacheKey("metaProjects.0.0"), + CacheKey("metaProjects.0.1"), + CacheKey("metaProjects.1.0"), CacheKey("QUERY_ROOT"), ), garbageCollectResult.removedDanglingReferences.removedRecords @@ -88,9 +88,9 @@ class GarbageCollectTest { assertEquals( setOf( - CacheKey("User:0".hashed()), - CacheKey("User:1".hashed()), - CacheKey("User:2".hashed()), + CacheKey("User:0"), + CacheKey("User:1"), + CacheKey("User:2"), ), garbageCollectResult.removedUnreachableRecords ) diff --git a/tests/garbage-collection/src/commonTest/kotlin/ReachableCacheKeysTest.kt b/tests/garbage-collection/src/commonTest/kotlin/ReachableCacheKeysTest.kt index 17aae080..4462ed81 100644 --- a/tests/garbage-collection/src/commonTest/kotlin/ReachableCacheKeysTest.kt +++ b/tests/garbage-collection/src/commonTest/kotlin/ReachableCacheKeysTest.kt @@ -8,7 +8,6 @@ import com.apollographql.cache.normalized.allRecords import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.fetchPolicy import com.apollographql.cache.normalized.getReachableCacheKeys -import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.removeUnreachableRecords import com.apollographql.cache.normalized.sql.SqlNormalizedCacheFactory @@ -131,35 +130,35 @@ class ReachableCacheKeysTest { assertContentEquals( listOf( CacheKey("QUERY_ROOT"), - CacheKey("Repository:8".hashed()), - CacheKey("Repository:7".hashed()), - CacheKey("User:43".hashed()), - CacheKey("User:42".hashed()), - CacheKey("User:0".hashed()), - CacheKey("Repository:6".hashed()), - CacheKey("Repository:5".hashed()), - CacheKey("Repository:4".hashed()), - CacheKey("Repository:3".hashed()), - CacheKey("Repository:2".hashed()), - CacheKey("Repository:1".hashed()), + CacheKey("Repository:8"), + CacheKey("Repository:7"), + CacheKey("User:43"), + CacheKey("User:42"), + CacheKey("User:0"), + CacheKey("Repository:6"), + CacheKey("Repository:5"), + CacheKey("Repository:4"), + CacheKey("Repository:3"), + CacheKey("Repository:2"), + CacheKey("Repository:1"), ), reachableCacheKeys ) // Remove User 43, now Repositories 5 and 6 should not be reachable / 7 should still be reachable - store.remove(CacheKey("User:43".hashed()), cascade = false) + store.remove(CacheKey("User:43"), cascade = false) reachableCacheKeys = store.accessCache { it.allRecords().getReachableCacheKeys() } assertContentEquals( listOf( CacheKey("QUERY_ROOT"), - CacheKey("Repository:8".hashed()), - CacheKey("Repository:7".hashed()), - CacheKey("User:42".hashed()), - CacheKey("User:0".hashed()), - CacheKey("Repository:4".hashed()), - CacheKey("Repository:3".hashed()), - CacheKey("Repository:2".hashed()), - CacheKey("Repository:1".hashed()), + CacheKey("Repository:8"), + CacheKey("Repository:7"), + CacheKey("User:42"), + CacheKey("User:0"), + CacheKey("Repository:4"), + CacheKey("Repository:3"), + CacheKey("Repository:2"), + CacheKey("Repository:1"), ), reachableCacheKeys ) @@ -167,40 +166,40 @@ class ReachableCacheKeysTest { // Add a non-reachable Repository, reachableCacheKeys should not change store.writeFragment( RepositoryFragmentImpl(), - 
CacheKey("Repository:500".hashed()), + CacheKey("Repository:500"), RepositoryFragment(id = "500", __typename = "Repository", starGazers = emptyList()), ) reachableCacheKeys = store.accessCache { it.allRecords().getReachableCacheKeys() } assertContentEquals( listOf( CacheKey("QUERY_ROOT"), - CacheKey("Repository:8".hashed()), - CacheKey("Repository:7".hashed()), - CacheKey("User:42".hashed()), - CacheKey("User:0".hashed()), - CacheKey("Repository:4".hashed()), - CacheKey("Repository:3".hashed()), - CacheKey("Repository:2".hashed()), - CacheKey("Repository:1".hashed()), + CacheKey("Repository:8"), + CacheKey("Repository:7"), + CacheKey("User:42"), + CacheKey("User:0"), + CacheKey("Repository:4"), + CacheKey("Repository:3"), + CacheKey("Repository:2"), + CacheKey("Repository:1"), ), reachableCacheKeys ) assertEquals( setOf( - CacheKey("User:42".hashed()), - CacheKey("Repository:6".hashed()), - CacheKey("User:0".hashed()), - CacheKey("Repository:8".hashed()), - CacheKey("Repository:3".hashed()), - CacheKey("Repository:1".hashed()), - CacheKey("Repository:2".hashed()), - CacheKey("Repository:4".hashed()), + CacheKey("User:42"), + CacheKey("Repository:6"), + CacheKey("User:0"), + CacheKey("Repository:8"), + CacheKey("Repository:3"), + CacheKey("Repository:1"), + CacheKey("Repository:2"), + CacheKey("Repository:4"), CacheKey("QUERY_ROOT"), - CacheKey("Repository:5".hashed()), - CacheKey("Repository:500".hashed()), - CacheKey("Repository:7".hashed()), + CacheKey("Repository:5"), + CacheKey("Repository:500"), + CacheKey("Repository:7"), ), - store.accessCache { it.allRecords() }.keys.map { CacheKey(it) }.toSet() + store.accessCache { it.allRecords() }.keys.toSet() ) // Remove unreachable records, should remove Repositories 5, 6, and 500 @@ -208,22 +207,22 @@ class ReachableCacheKeysTest { assertEquals( setOf( CacheKey("QUERY_ROOT"), - CacheKey("Repository:8".hashed()), - CacheKey("Repository:7".hashed()), - CacheKey("User:42".hashed()), - CacheKey("User:0".hashed()), - CacheKey("Repository:4".hashed()), - CacheKey("Repository:3".hashed()), - CacheKey("Repository:2".hashed()), - CacheKey("Repository:1".hashed()), + CacheKey("Repository:8"), + CacheKey("Repository:7"), + CacheKey("User:42"), + CacheKey("User:0"), + CacheKey("Repository:4"), + CacheKey("Repository:3"), + CacheKey("Repository:2"), + CacheKey("Repository:1"), ), - store.accessCache { it.allRecords() }.keys.map { CacheKey(it) }.toSet() + store.accessCache { it.allRecords() }.keys.toSet() ) assertEquals( setOf( - CacheKey("Repository:6".hashed()), - CacheKey("Repository:5".hashed()), - CacheKey("Repository:500".hashed()), + CacheKey("Repository:6"), + CacheKey("Repository:5"), + CacheKey("Repository:500"), ), removedKeys ) diff --git a/tests/garbage-collection/src/commonTest/kotlin/StaleFieldsTest.kt b/tests/garbage-collection/src/commonTest/kotlin/StaleFieldsTest.kt index f3a9f54e..3213155d 100644 --- a/tests/garbage-collection/src/commonTest/kotlin/StaleFieldsTest.kt +++ b/tests/garbage-collection/src/commonTest/kotlin/StaleFieldsTest.kt @@ -46,10 +46,10 @@ class StaleFieldsTest { .execute() var allRecords = store.accessCache { it.allRecords() } - assertTrue(allRecords["Repository:0".hashed()]!!.fields.containsKey("stars")) - assertTrue(allRecords["Repository:0".hashed()]!!.fields.containsKey("starGazers")) - assertTrue(allRecords["Repository:1".hashed()]!!.fields.containsKey("stars")) - assertTrue(allRecords["Repository:1".hashed()]!!.fields.containsKey("starGazers")) + 
assertTrue(allRecords[CacheKey("Repository:0")]!!.fields.containsKey("stars")) + assertTrue(allRecords[CacheKey("Repository:0")]!!.fields.containsKey("starGazers")) + assertTrue(allRecords[CacheKey("Repository:1")]!!.fields.containsKey("stars")) + assertTrue(allRecords[CacheKey("Repository:1")]!!.fields.containsKey("starGazers")) val maxAgeProvider = SchemaCoordinatesMaxAgeProvider( Cache.maxAges, @@ -67,10 +67,10 @@ class StaleFieldsTest { emptySet(), removedFieldsAndRecords.removedRecords ) allRecords = store.accessCache { it.allRecords() } - assertFalse(allRecords["Repository:0".hashed()]!!.fields.containsKey("stars")) - assertTrue(allRecords["Repository:0".hashed()]!!.fields.containsKey("starGazers")) - assertFalse(allRecords["Repository:1".hashed()]!!.fields.containsKey("stars")) - assertTrue(allRecords["Repository:1".hashed()]!!.fields.containsKey("starGazers")) + assertFalse(allRecords[CacheKey("Repository:0")]!!.fields.containsKey("stars")) + assertTrue(allRecords[CacheKey("Repository:0")]!!.fields.containsKey("starGazers")) + assertFalse(allRecords[CacheKey("Repository:1")]!!.fields.containsKey("stars")) + assertTrue(allRecords[CacheKey("Repository:1")]!!.fields.containsKey("starGazers")) mockServer.enqueueString(REPOSITORY_LIST_RESPONSE) apolloClient.query(RepositoryListQuery()) @@ -91,10 +91,10 @@ class StaleFieldsTest { emptySet(), removedFieldsAndRecords.removedRecords ) allRecords = store.accessCache { it.allRecords() } - assertFalse(allRecords["Repository:0".hashed()]!!.fields.containsKey("stars")) - assertFalse(allRecords["Repository:0".hashed()]!!.fields.containsKey("starGazers")) - assertFalse(allRecords["Repository:1".hashed()]!!.fields.containsKey("stars")) - assertFalse(allRecords["Repository:1".hashed()]!!.fields.containsKey("starGazers")) + assertFalse(allRecords[CacheKey("Repository:0")]!!.fields.containsKey("stars")) + assertFalse(allRecords[CacheKey("Repository:0")]!!.fields.containsKey("starGazers")) + assertFalse(allRecords[CacheKey("Repository:1")]!!.fields.containsKey("stars")) + assertFalse(allRecords[CacheKey("Repository:1")]!!.fields.containsKey("starGazers")) } } @@ -114,10 +114,10 @@ class StaleFieldsTest { .execute() var allRecords = store.accessCache { it.allRecords() } - assertTrue(allRecords["projects.0".hashed()]!!.fields.containsKey("velocity")) - assertTrue(allRecords["projects.0".hashed()]!!.fields.containsKey("isUrgent")) - assertTrue(allRecords["projects.1".hashed()]!!.fields.containsKey("velocity")) - assertTrue(allRecords["projects.1".hashed()]!!.fields.containsKey("isUrgent")) + assertTrue(allRecords[CacheKey("projects.0")]!!.fields.containsKey("velocity")) + assertTrue(allRecords[CacheKey("projects.0")]!!.fields.containsKey("isUrgent")) + assertTrue(allRecords[CacheKey("projects.1")]!!.fields.containsKey("velocity")) + assertTrue(allRecords[CacheKey("projects.1")]!!.fields.containsKey("isUrgent")) val maxAgeProvider = SchemaCoordinatesMaxAgeProvider( Cache.maxAges, @@ -135,10 +135,10 @@ class StaleFieldsTest { emptySet(), removedFieldsAndRecords.removedRecords ) allRecords = store.accessCache { it.allRecords() } - assertFalse(allRecords["projects.0".hashed()]!!.fields.containsKey("velocity")) - assertTrue(allRecords["projects.0".hashed()]!!.fields.containsKey("isUrgent")) - assertFalse(allRecords["projects.1".hashed()]!!.fields.containsKey("velocity")) - assertTrue(allRecords["projects.1".hashed()]!!.fields.containsKey("isUrgent")) + assertFalse(allRecords[CacheKey("projects.0")]!!.fields.containsKey("velocity")) + 
assertTrue(allRecords[CacheKey("projects.0")]!!.fields.containsKey("isUrgent")) + assertFalse(allRecords[CacheKey("projects.1")]!!.fields.containsKey("velocity")) + assertTrue(allRecords[CacheKey("projects.1")]!!.fields.containsKey("isUrgent")) mockServer.enqueueString(PROJECT_LIST_RESPONSE) apolloClient.query(ProjectListQuery()) @@ -157,13 +157,13 @@ class StaleFieldsTest { ) assertEquals( setOf( - CacheKey("projects.0".hashed()), - CacheKey("projects.1".hashed()), + CacheKey("projects.0"), + CacheKey("projects.1"), ), removedFieldsAndRecords.removedRecords ) allRecords = store.accessCache { it.allRecords() } - assertFalse(allRecords.containsKey("projects.0".hashed())) - assertFalse(allRecords.containsKey("projects.1".hashed())) + assertFalse(allRecords.containsKey(CacheKey("projects.0"))) + assertFalse(allRecords.containsKey(CacheKey("projects.1"))) } } @@ -183,10 +183,10 @@ class StaleFieldsTest { .execute() var allRecords = store.accessCache { it.allRecords() } - assertTrue(allRecords["Repository:0".hashed()]!!.fields.containsKey("stars")) - assertTrue(allRecords["Repository:0".hashed()]!!.fields.containsKey("starGazers")) - assertTrue(allRecords["Repository:1".hashed()]!!.fields.containsKey("stars")) - assertTrue(allRecords["Repository:1".hashed()]!!.fields.containsKey("starGazers")) + assertTrue(allRecords[CacheKey("Repository:0")]!!.fields.containsKey("stars")) + assertTrue(allRecords[CacheKey("Repository:0")]!!.fields.containsKey("starGazers")) + assertTrue(allRecords[CacheKey("Repository:1")]!!.fields.containsKey("stars")) + assertTrue(allRecords[CacheKey("Repository:1")]!!.fields.containsKey("starGazers")) var removedFieldsAndRecords = store.removeStaleFields(GlobalMaxAgeProvider(Duration.INFINITE)) // Everything is stale @@ -214,11 +214,11 @@ class StaleFieldsTest { ) assertEquals( setOf( - CacheKey("Repository:0".hashed()), - CacheKey("Repository:1".hashed()), - CacheKey("User:0".hashed()), - CacheKey("User:1".hashed()), - CacheKey("User:2".hashed()), + CacheKey("Repository:0"), + CacheKey("Repository:1"), + CacheKey("User:0"), + CacheKey("User:1"), + CacheKey("User:2"), CacheKey("QUERY_ROOT"), ), removedFieldsAndRecords.removedRecords ) @@ -242,10 +242,10 @@ class StaleFieldsTest { removedFieldsAndRecords.removedRecords ) allRecords = store.accessCache { it.allRecords() } - assertTrue(allRecords["Repository:0".hashed()]!!.fields.containsKey("stars")) - assertTrue(allRecords["Repository:0".hashed()]!!.fields.containsKey("starGazers")) - assertTrue(allRecords["Repository:1".hashed()]!!.fields.containsKey("stars")) - assertTrue(allRecords["Repository:1".hashed()]!!.fields.containsKey("starGazers")) + assertTrue(allRecords[CacheKey("Repository:0")]!!.fields.containsKey("stars")) + assertTrue(allRecords[CacheKey("Repository:0")]!!.fields.containsKey("starGazers")) + assertTrue(allRecords[CacheKey("Repository:1")]!!.fields.containsKey("stars")) + assertTrue(allRecords[CacheKey("Repository:1")]!!.fields.containsKey("starGazers")) } } diff --git a/tests/include-skip-operation-based/src/commonTest/kotlin/IncludeTest.kt b/tests/include-skip-operation-based/src/commonTest/kotlin/IncludeTest.kt index bdf7df02..d932a362 100644 --- a/tests/include-skip-operation-based/src/commonTest/kotlin/IncludeTest.kt +++ b/tests/include-skip-operation-based/src/commonTest/kotlin/IncludeTest.kt @@ -5,7 +5,7 @@ import com.apollographql.apollo.api.Optional import com.apollographql.apollo.api.json.MapJsonReader import com.apollographql.apollo.api.toApolloResponse import 
com.apollographql.apollo.testing.internal.runTest -import com.apollographql.cache.normalized.internal.hashed +import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.internal.normalized import com.example.GetCatIncludeVariableWithDefaultQuery import com.example.SkipFragmentWithDefaultToFalseQuery @@ -30,7 +30,7 @@ class IncludeTest { } val normalized = data.normalized(operation) - assertNull((normalized["animal".hashed()] as Map<*, *>)["species"]) + assertNull((normalized[CacheKey("animal")] as Map<*, *>)["species"]) } @Test @@ -44,6 +44,6 @@ class IncludeTest { } val normalized = data.normalized(operation) - assertNull((normalized["animal".hashed()] as Map<*, *>)["barf"]) + assertNull((normalized[CacheKey("animal")] as Map<*, *>)["barf"]) } } diff --git a/tests/migration/src/commonTest/kotlin/MigrationTest.kt b/tests/migration/src/commonTest/kotlin/MigrationTest.kt index eee75de2..521b3c6a 100644 --- a/tests/migration/src/commonTest/kotlin/MigrationTest.kt +++ b/tests/migration/src/commonTest/kotlin/MigrationTest.kt @@ -169,7 +169,7 @@ private fun LegacyNormalizedCache.allRecords(): List { } private fun LegacyRecord.toRecord(): Record = Record( - key = key, + key = CacheKey(key), fields = fields.mapValues { (_, value) -> value.toRecordValue() }, mutationId = mutationId ) diff --git a/tests/models-operation-based-with-interfaces/src/commonTest/kotlin/test/StoreTest.kt b/tests/models-operation-based-with-interfaces/src/commonTest/kotlin/test/StoreTest.kt index 89800b8d..829e8138 100644 --- a/tests/models-operation-based-with-interfaces/src/commonTest/kotlin/test/StoreTest.kt +++ b/tests/models-operation-based-with-interfaces/src/commonTest/kotlin/test/StoreTest.kt @@ -11,7 +11,6 @@ import com.apollographql.apollo.testing.internal.runTest import com.apollographql.cache.normalized.ApolloStore import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.IdCacheKeyGenerator -import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.store import com.apollographql.mockserver.MockServer @@ -45,7 +44,7 @@ class StoreTest { val heroWithFriendsFragment = store.readFragment( HeroWithFriendsFragmentImpl(), - CacheKey("Character:2001".hashed()), + CacheKey("Character:2001"), ).data assertEquals(heroWithFriendsFragment.id, "2001") assertEquals(heroWithFriendsFragment.name, "R2-D2") @@ -59,7 +58,7 @@ class StoreTest { var fragment = store.readFragment( HumanWithIdFragmentImpl(), - CacheKey("Character:1000".hashed()), + CacheKey("Character:1000"), ).data assertEquals(fragment.id, "1000") @@ -67,14 +66,14 @@ class StoreTest { fragment = store.readFragment( HumanWithIdFragmentImpl(), - CacheKey("Character:1002".hashed()), + CacheKey("Character:1002"), ).data assertEquals(fragment.id, "1002") assertEquals(fragment.name, "Han Solo") fragment = store.readFragment( HumanWithIdFragmentImpl(), - CacheKey("Character:1003".hashed()), + CacheKey("Character:1003"), ).data assertEquals(fragment.id, "1003") assertEquals(fragment.name, "Leia Organa") @@ -101,7 +100,7 @@ class StoreTest { store.writeFragment( HeroWithFriendsFragmentImpl(), - CacheKey("Character:2001".hashed()), + CacheKey("Character:2001"), HeroWithFriendsFragment( "2001", "R222-D222", @@ -126,7 +125,7 @@ class StoreTest { store.writeFragment( HumanWithIdFragmentImpl(), - CacheKey("Character:1002".hashed()), + CacheKey("Character:1002"), HumanWithIdFragment( "1002", 
"Beast" diff --git a/tests/models-operation-based/src/commonTest/kotlin/test/StoreTest.kt b/tests/models-operation-based/src/commonTest/kotlin/test/StoreTest.kt index 3b27d844..5c500a81 100644 --- a/tests/models-operation-based/src/commonTest/kotlin/test/StoreTest.kt +++ b/tests/models-operation-based/src/commonTest/kotlin/test/StoreTest.kt @@ -11,7 +11,6 @@ import com.apollographql.apollo.testing.internal.runTest import com.apollographql.cache.normalized.ApolloStore import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.IdCacheKeyGenerator -import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.store import com.apollographql.mockserver.MockServer @@ -45,7 +44,7 @@ class StoreTest { val heroWithFriendsFragment = store.readFragment( HeroWithFriendsFragmentImpl(), - CacheKey("Character:2001".hashed()), + CacheKey("Character:2001"), ).data assertEquals(heroWithFriendsFragment.id, "2001") assertEquals(heroWithFriendsFragment.name, "R2-D2") @@ -59,7 +58,7 @@ class StoreTest { var fragment = store.readFragment( HumanWithIdFragmentImpl(), - CacheKey("Character:1000".hashed()), + CacheKey("Character:1000"), ).data assertEquals(fragment.id, "1000") @@ -67,14 +66,14 @@ class StoreTest { fragment = store.readFragment( HumanWithIdFragmentImpl(), - CacheKey("Character:1002".hashed()), + CacheKey("Character:1002"), ).data assertEquals(fragment.id, "1002") assertEquals(fragment.name, "Han Solo") fragment = store.readFragment( HumanWithIdFragmentImpl(), - CacheKey("Character:1003".hashed()), + CacheKey("Character:1003"), ).data assertEquals(fragment.id, "1003") assertEquals(fragment.name, "Leia Organa") @@ -101,7 +100,7 @@ class StoreTest { store.writeFragment( HeroWithFriendsFragmentImpl(), - CacheKey("Character:2001".hashed()), + CacheKey("Character:2001"), HeroWithFriendsFragment( "2001", "R222-D222", @@ -126,7 +125,7 @@ class StoreTest { store.writeFragment( HumanWithIdFragmentImpl(), - CacheKey("Character:1002".hashed()), + CacheKey("Character:1002"), HumanWithIdFragment( "1002", "Beast" diff --git a/tests/models-response-based/src/commonTest/kotlin/test/StoreTest.kt b/tests/models-response-based/src/commonTest/kotlin/test/StoreTest.kt index 8798efa2..c02b0d84 100644 --- a/tests/models-response-based/src/commonTest/kotlin/test/StoreTest.kt +++ b/tests/models-response-based/src/commonTest/kotlin/test/StoreTest.kt @@ -12,7 +12,6 @@ import com.apollographql.apollo.testing.internal.runTest import com.apollographql.cache.normalized.ApolloStore import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.IdCacheKeyGenerator -import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.store import com.apollographql.mockserver.MockServer @@ -46,7 +45,7 @@ class StoreTest { val heroWithFriendsFragment = store.readFragment( HeroWithFriendsFragmentImpl(), - CacheKey("Character:2001".hashed()), + CacheKey("Character:2001"), ).data assertEquals(heroWithFriendsFragment.id, "2001") assertEquals(heroWithFriendsFragment.name, "R2-D2") @@ -60,7 +59,7 @@ class StoreTest { var fragment = store.readFragment( HumanWithIdFragmentImpl(), - CacheKey("Character:1000".hashed()), + CacheKey("Character:1000"), ).data assertEquals(fragment.id, "1000") @@ -68,14 +67,14 @@ class StoreTest { fragment = store.readFragment( 
HumanWithIdFragmentImpl(), - CacheKey("Character:1002".hashed()), + CacheKey("Character:1002"), ).data assertEquals(fragment.id, "1002") assertEquals(fragment.name, "Han Solo") fragment = store.readFragment( HumanWithIdFragmentImpl(), - CacheKey("Character:1003".hashed()), + CacheKey("Character:1003"), ).data assertEquals(fragment.id, "1003") assertEquals(fragment.name, "Leia Organa") @@ -102,7 +101,7 @@ class StoreTest { store.writeFragment( HeroWithFriendsFragmentImpl(), - CacheKey("Character:2001".hashed()), + CacheKey("Character:2001"), HeroWithFriendsFragmentImpl.Data( id = "2001", name = "R222-D222", @@ -123,7 +122,7 @@ class StoreTest { store.writeFragment( HumanWithIdFragmentImpl(), - CacheKey("Character:1002".hashed()), + CacheKey("Character:1002"), HumanWithIdFragmentImpl.Data( id = "1002", name = "Beast" diff --git a/tests/normalized-cache/src/commonTest/kotlin/MemoryCacheTest.kt b/tests/normalized-cache/src/commonTest/kotlin/MemoryCacheTest.kt index 6b80fae0..22f81e8b 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/MemoryCacheTest.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/MemoryCacheTest.kt @@ -2,6 +2,7 @@ package test import com.apollographql.apollo.testing.internal.runTest import com.apollographql.cache.normalized.api.CacheHeaders +import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.DefaultRecordMerger import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.memory.MemoryCache @@ -14,7 +15,7 @@ class MemoryCacheTest { @Test fun testDoesNotExpireBeforeMillis() = runTest { val record = Record( - key = "key", + key = CacheKey("key"), fields = mapOf( "field" to "value" ) diff --git a/tests/normalized-cache/src/commonTest/kotlin/NormalizerTest.kt b/tests/normalized-cache/src/commonTest/kotlin/NormalizerTest.kt index b6dadb34..d78f7e29 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/NormalizerTest.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/NormalizerTest.kt @@ -34,8 +34,6 @@ import kotlin.test.assertTrue class NormalizerTest { private lateinit var normalizedCache: NormalizedCache - private val rootKey = "QUERY_ROOT" - @BeforeTest fun setUp() { normalizedCache = MemoryCacheFactory().create() @@ -45,10 +43,10 @@ class NormalizerTest { @Throws(Exception::class) fun testHeroName() { val records = records(HeroNameQuery(), "HeroNameResponse.json") - val record = records.get(rootKey) + val record = records.get(CacheKey.rootKey()) val reference = record!!["hero"] as CacheKey? - assertEquals(reference, CacheKey("hero".hashed())) - val heroRecord = records.get(reference!!.key) + assertEquals(reference, CacheKey("hero")) + val heroRecord = records.get(reference!!) 
assertEquals(heroRecord!!["name"], "R2-D2") } @@ -56,30 +54,30 @@ class NormalizerTest { @Throws(Exception::class) fun testMergeNull() { val record = Record( - key = "Key", + key = CacheKey("Key"), fields = mapOf("field1" to "value1"), ) normalizedCache.merge(listOf(record), CacheHeaders.NONE, DefaultRecordMerger) val newRecord = Record( - key = "Key", + key = CacheKey("Key"), fields = mapOf("field2" to null), ) normalizedCache.merge(listOf(newRecord), CacheHeaders.NONE, DefaultRecordMerger) val finalRecord = normalizedCache.loadRecord(record.key, CacheHeaders.NONE) assertTrue(finalRecord!!.containsKey("field2")) - normalizedCache.remove(CacheKey(record.key), false) + normalizedCache.remove(record.key, false) } @Test @Throws(Exception::class) fun testHeroNameWithVariable() { val records = records(EpisodeHeroNameQuery(Episode.JEDI), "EpisodeHeroNameResponse.json") - val record = records.get(rootKey) + val record = records.get(CacheKey.rootKey()) val reference = record!![TEST_FIELD_KEY_JEDI] as CacheKey? - assertEquals(reference, CacheKey(TEST_FIELD_KEY_JEDI.hashed())) - val heroRecord = records.get(reference!!.key) + assertEquals(reference, CacheKey(TEST_FIELD_KEY_JEDI)) + val heroRecord = records.get(reference!!) assertEquals(heroRecord!!["name"], "R2-D2") } @@ -88,12 +86,12 @@ class NormalizerTest { fun testHeroAppearsInQuery() { val records = records(HeroAppearsInQuery(), "HeroAppearsInResponse.json") - val rootRecord = records.get(rootKey)!! + val rootRecord = records.get(CacheKey.rootKey())!! val heroReference = rootRecord["hero"] as CacheKey? - assertEquals(heroReference, CacheKey("hero".hashed())) + assertEquals(heroReference, CacheKey("hero")) - val hero = records.get(heroReference!!.key) + val hero = records.get(heroReference!!) assertEquals(hero?.get("appearsIn"), listOf("NEWHOPE", "EMPIRE", "JEDI")) } @@ -101,20 +99,20 @@ class NormalizerTest { @Throws(Exception::class) fun testHeroAndFriendsNamesQueryWithoutIDs() { val records = records(HeroAndFriendsNamesQuery(Episode.JEDI), "HeroAndFriendsNameResponse.json") - val record = records.get(rootKey) + val record = records.get(CacheKey.rootKey()) val heroReference = record!![TEST_FIELD_KEY_JEDI] as CacheKey? - assertEquals(heroReference, CacheKey(TEST_FIELD_KEY_JEDI.hashed())) - val heroRecord = records.get(heroReference!!.key) + assertEquals(heroReference, CacheKey(TEST_FIELD_KEY_JEDI)) + val heroRecord = records.get(heroReference!!) assertEquals(heroRecord!!["name"], "R2-D2") assertEquals( listOf( - CacheKey("${TEST_FIELD_KEY_JEDI.hashed()}.friends.0".hashed()), - CacheKey("${TEST_FIELD_KEY_JEDI.hashed()}.friends.1".hashed()), - CacheKey("${TEST_FIELD_KEY_JEDI.hashed()}.friends.2".hashed()) + CacheKey("${TEST_FIELD_KEY_JEDI.hashed()}.friends.0"), + CacheKey("${TEST_FIELD_KEY_JEDI.hashed()}.friends.1"), + CacheKey("${TEST_FIELD_KEY_JEDI.hashed()}.friends.2") ), heroRecord["friends"] ) - val luke = records.get("${TEST_FIELD_KEY_JEDI.hashed()}.friends.0".hashed()) + val luke = records.get(CacheKey("${TEST_FIELD_KEY_JEDI.hashed()}.friends.0")) assertEquals(luke!!["name"], "Luke Skywalker") } @@ -122,20 +120,20 @@ class NormalizerTest { @Throws(Exception::class) fun testHeroAndFriendsNamesQueryWithIDs() { val records = records(HeroAndFriendsNamesWithIDsQuery(Episode.JEDI), "HeroAndFriendsNameWithIdsResponse.json") - val record = records.get(rootKey) + val record = records.get(CacheKey.rootKey()) val heroReference = record!![TEST_FIELD_KEY_JEDI] as CacheKey? 
- assertEquals(CacheKey("Character:2001".hashed()), heroReference) - val heroRecord = records.get(heroReference!!.key) + assertEquals(CacheKey("Character:2001"), heroReference) + val heroRecord = records.get(heroReference!!) assertEquals(heroRecord!!["name"], "R2-D2") assertEquals( listOf( - CacheKey("Character:1000".hashed()), - CacheKey("Character:1002".hashed()), - CacheKey("Character:1003".hashed()) + CacheKey("Character:1000"), + CacheKey("Character:1002"), + CacheKey("Character:1003") ), heroRecord["friends"] ) - val luke = records.get("Character:1000".hashed()) + val luke = records.get(CacheKey("Character:1000")) assertEquals(luke!!["name"], "Luke Skywalker") } @@ -143,20 +141,20 @@ class NormalizerTest { @Throws(Exception::class) fun testHeroAndFriendsNamesWithIDForParentOnly() { val records = records(HeroAndFriendsNamesWithIDForParentOnlyQuery(Episode.JEDI), "HeroAndFriendsNameWithIdsParentOnlyResponse.json") - val record = records[rootKey] + val record = records[CacheKey.rootKey()] val heroReference = record!![TEST_FIELD_KEY_JEDI] as CacheKey? - assertEquals(CacheKey("Character:2001".hashed()), heroReference) - val heroRecord = records.get(heroReference!!.key) + assertEquals(CacheKey("Character:2001"), heroReference) + val heroRecord = records.get(heroReference!!) assertEquals(heroRecord!!["name"], "R2-D2") assertEquals( listOf( - CacheKey("${"Character:2001".hashed()}.friends.0".hashed()), - CacheKey("${"Character:2001".hashed()}.friends.1".hashed()), - CacheKey("${"Character:2001".hashed()}.friends.2".hashed()) + CacheKey("${CacheKey("Character:2001").key}.friends.0"), + CacheKey("${CacheKey("Character:2001").key}.friends.1"), + CacheKey("${CacheKey("Character:2001").key}.friends.2") ), heroRecord["friends"] ) - val luke = records.get("${"Character:2001".hashed()}.friends.0".hashed()) + val luke = records.get(CacheKey("${CacheKey("Character:2001").key}.friends.0")) assertEquals(luke!!["name"], "Luke Skywalker") } @@ -164,9 +162,9 @@ class NormalizerTest { @Throws(Exception::class) fun testSameHeroTwiceQuery() { val records = records(SameHeroTwiceQuery(), "SameHeroTwiceResponse.json") - val record = records.get(rootKey) + val record = records.get(CacheKey.rootKey()) val heroReference = record!!["hero"] as CacheKey? - val hero = records.get(heroReference!!.key) + val hero = records.get(heroReference!!) assertEquals(hero!!["name"], "R2-D2") assertEquals(hero["appearsIn"], listOf("NEWHOPE", "EMPIRE", "JEDI")) @@ -176,9 +174,9 @@ class NormalizerTest { @Throws(Exception::class) fun testHeroTypeDependentAliasedFieldQueryDroid() { val records = records(HeroTypeDependentAliasedFieldQuery(Episode.JEDI), "HeroTypeDependentAliasedFieldResponse.json") - val record = records.get(rootKey) + val record = records.get(CacheKey.rootKey()) val heroReference = record!![TEST_FIELD_KEY_JEDI] as CacheKey? - val hero = records.get(heroReference!!.key) + val hero = records.get(heroReference!!) assertEquals(hero!!["primaryFunction"], "Astromech") assertEquals(hero["__typename"], "Droid") } @@ -187,9 +185,9 @@ class NormalizerTest { @Throws(Exception::class) fun testHeroTypeDependentAliasedFieldQueryHuman() { val records = records(HeroTypeDependentAliasedFieldQuery(Episode.EMPIRE), "HeroTypeDependentAliasedFieldResponseHuman.json") - val record = records.get(rootKey) + val record = records.get(CacheKey.rootKey()) val heroReference = record!![TEST_FIELD_KEY_EMPIRE] as CacheKey? - val hero = records.get(heroReference!!.key) + val hero = records.get(heroReference!!) 
assertEquals(hero!!["homePlanet"], "Tatooine") assertEquals(hero["__typename"], "Human") } @@ -198,9 +196,9 @@ class NormalizerTest { @Throws(Exception::class) fun testHeroParentTypeDependentAliasedFieldQueryHuman() { val records = records(HeroTypeDependentAliasedFieldQuery(Episode.EMPIRE), "HeroTypeDependentAliasedFieldResponseHuman.json") - val record = records.get(rootKey) + val record = records.get(CacheKey.rootKey()) val heroReference = record!![TEST_FIELD_KEY_EMPIRE] as CacheKey? - val hero = records.get(heroReference!!.key) + val hero = records.get(heroReference!!) assertEquals(hero!!["homePlanet"], "Tatooine") assertEquals(hero["__typename"], "Human") } @@ -209,17 +207,17 @@ class NormalizerTest { @Throws(Exception::class) fun testHeroParentTypeDependentFieldDroid() { val records = records(HeroParentTypeDependentFieldQuery(Episode.JEDI), "HeroParentTypeDependentFieldDroidResponse.json") - val lukeRecord = records.get((TEST_FIELD_KEY_JEDI.hashed() + ".friends.0").hashed()) + val lukeRecord = records.get(CacheKey(TEST_FIELD_KEY_JEDI.hashed() + ".friends.0")) assertEquals(lukeRecord!!["name"], "Luke Skywalker") assertEquals(lukeRecord["height({\"unit\":\"METER\"})"], 1.72) - val friends = records[TEST_FIELD_KEY_JEDI.hashed()]!!["friends"] + val friends = records[CacheKey(TEST_FIELD_KEY_JEDI)]!!["friends"] assertIs>(friends) - assertEquals(friends[0], CacheKey((TEST_FIELD_KEY_JEDI.hashed() + ".friends.0").hashed())) - assertEquals(friends[1], CacheKey((TEST_FIELD_KEY_JEDI.hashed() + ".friends.1").hashed())) - assertEquals(friends[2], CacheKey((TEST_FIELD_KEY_JEDI.hashed() + ".friends.2").hashed())) + assertEquals(friends[0], CacheKey((TEST_FIELD_KEY_JEDI.hashed() + ".friends.0"))) + assertEquals(friends[1], CacheKey((TEST_FIELD_KEY_JEDI.hashed() + ".friends.1"))) + assertEquals(friends[2], CacheKey((TEST_FIELD_KEY_JEDI.hashed() + ".friends.2"))) } @Test @@ -227,11 +225,11 @@ class NormalizerTest { val records = records(AllPlanetsQuery(), "AllPlanetsListOfObjectWithNullObject.json") val fieldKey = "allPlanets({\"first\":300})".hashed() - var record: Record? = records["$fieldKey.planets.0".hashed()] + var record: Record? = records[CacheKey("$fieldKey.planets.0")] assertTrue(record?.get("filmConnection") == null) - record = records.get("${"$fieldKey.planets.0".hashed()}.filmConnection".hashed()) as Record? 
+ record = records.get(CacheKey("${"$fieldKey.planets.0".hashed()}.filmConnection")) assertTrue(record == null) - record = records.get("${"$fieldKey.planets.1".hashed()}.filmConnection".hashed()) + record = records.get(CacheKey("${"$fieldKey.planets.1".hashed()}.filmConnection")) assertTrue(record != null) } @@ -241,13 +239,13 @@ class NormalizerTest { fun testHeroParentTypeDependentFieldHuman() { val records = records(HeroParentTypeDependentFieldQuery(Episode.EMPIRE), "HeroParentTypeDependentFieldHumanResponse.json") - val lukeRecord = records.get("${TEST_FIELD_KEY_EMPIRE.hashed()}.friends.0".hashed()) + val lukeRecord = records.get(CacheKey("${TEST_FIELD_KEY_EMPIRE.hashed()}.friends.0")) assertEquals(lukeRecord!!["name"], "Han Solo") assertEquals(lukeRecord["height({\"unit\":\"FOOT\"})"], 5.905512) } companion object { - internal fun records(operation: Operation, name: String): Map { + internal fun records(operation: Operation, name: String): Map { val response = testFixtureToJsonReader(name).toApolloResponse(operation) return response.data!!.normalized(operation, cacheKeyGenerator = IdCacheKeyGenerator()) } diff --git a/tests/normalized-cache/src/commonTest/kotlin/OptimisticCacheTest.kt b/tests/normalized-cache/src/commonTest/kotlin/OptimisticCacheTest.kt index 3323fa68..5abe1a68 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/OptimisticCacheTest.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/OptimisticCacheTest.kt @@ -9,7 +9,6 @@ import com.apollographql.cache.normalized.FetchPolicy import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.IdCacheKeyGenerator import com.apollographql.cache.normalized.fetchPolicy -import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.optimisticUpdates import com.apollographql.cache.normalized.refetchPolicy @@ -123,7 +122,7 @@ class OptimisticCacheTest { store.writeOptimisticUpdates( HeroAndFriendsNamesFragmentImpl(), mutationId = mutationId, - cacheKey = CacheKey("""hero({"episode":"JEDI"})""".hashed()), + cacheKey = CacheKey("""hero({"episode":"JEDI"})"""), data = data, ).also { store.publish(it) diff --git a/tests/normalized-cache/src/commonTest/kotlin/OtherCacheTest.kt b/tests/normalized-cache/src/commonTest/kotlin/OtherCacheTest.kt index f3d9072d..8399d1bc 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/OtherCacheTest.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/OtherCacheTest.kt @@ -6,10 +6,10 @@ import com.apollographql.apollo.exception.CacheMissException import com.apollographql.apollo.testing.internal.runTest import com.apollographql.cache.normalized.ApolloStore import com.apollographql.cache.normalized.FetchPolicy +import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.IdCacheKeyGenerator import com.apollographql.cache.normalized.api.IdCacheKeyResolver import com.apollographql.cache.normalized.fetchPolicy -import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.store import com.apollographql.mockserver.MockServer @@ -73,7 +73,7 @@ class OtherCacheTest { // Some details are not present in the master query, we should get a cache miss val e = apolloClient.query(CharacterDetailsQuery("1002")).fetchPolicy(FetchPolicy.CacheOnly).execute().exception as CacheMissException - 
assertTrue(e.message!!.contains("Object '${"Character:1002".hashed()}' has no field named '__typename'")) + assertTrue(e.message!!.contains("Object '${CacheKey("Character:1002").key}' has no field named '__typename'")) } diff --git a/tests/normalized-cache/src/commonTest/kotlin/StoreTest.kt b/tests/normalized-cache/src/commonTest/kotlin/StoreTest.kt index 1236045a..c574e33c 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/StoreTest.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/StoreTest.kt @@ -11,14 +11,12 @@ import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.IdCacheKeyGenerator import com.apollographql.cache.normalized.api.IdCacheKeyResolver import com.apollographql.cache.normalized.fetchPolicy -import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.isFromCache import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.store import normalizer.CharacterNameByIdQuery import normalizer.HeroAndFriendsNamesWithIDsQuery import normalizer.type.Episode -import kotlin.test.Ignore import kotlin.test.Test import kotlin.test.assertEquals import kotlin.test.assertIs @@ -43,7 +41,7 @@ class StoreTest { assertFriendIsCached("1002", "Han Solo") // remove the root query object - var removed = store.remove(CacheKey("Character:2001".hashed())) + var removed = store.remove(CacheKey("Character:2001")) assertEquals(true, removed) // Trying to get the full response should fail @@ -54,7 +52,7 @@ class StoreTest { assertFriendIsCached("1002", "Han Solo") // remove a single object from the list - removed = store.remove(CacheKey("Character:1002".hashed())) + removed = store.remove(CacheKey("Character:1002")) assertEquals(true, removed) // Trying to get the full response should fail @@ -76,7 +74,7 @@ class StoreTest { assertFriendIsCached("1003", "Leia Organa") // Now remove multiple keys - val removed = store.remove(listOf(CacheKey("Character:1002".hashed()), CacheKey("Character:1000".hashed()))) + val removed = store.remove(listOf(CacheKey("Character:1002"), CacheKey("Character:1000"))) assertEquals(2, removed) @@ -97,7 +95,7 @@ class StoreTest { assertFriendIsCached("1003", "Leia Organa") // test remove root query object - val removed = store.remove(CacheKey("Character:2001".hashed()), true) + val removed = store.remove(CacheKey("Character:2001"), true) assertEquals(true, removed) // Nothing should be cached anymore @@ -107,21 +105,6 @@ class StoreTest { assertFriendIsNotCached("1003") } - @Ignore - @Test - @Throws(Exception::class) - fun directAccess() = runTest(before = { setUp() }) { - // put everything in the cache - storeAllFriends() - - store.accessCache { - it.remove("Character:10%") - } - assertFriendIsNotCached("1000") - assertFriendIsNotCached("1002") - assertFriendIsNotCached("1003") - } - @Test fun testNewBuilderNewStore() = runTest(before = { setUp() }) { storeAllFriends() diff --git a/tests/normalized-cache/src/commonTest/kotlin/ThreadTests.kt b/tests/normalized-cache/src/commonTest/kotlin/ThreadTests.kt index e424371f..29538744 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/ThreadTests.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/ThreadTests.kt @@ -62,28 +62,21 @@ class ThreadTests { return delegate.remove(cacheKeys, cascade) } - override fun remove(pattern: String): Int { - check(currentThreadId() != mainThreadId) { - "Cache access on main thread" - } - return delegate.remove(pattern) - } - - override fun 
loadRecord(key: String, cacheHeaders: CacheHeaders): Record? { + override fun loadRecord(key: CacheKey, cacheHeaders: CacheHeaders): Record? { check(currentThreadId() != mainThreadId) { "Cache access on main thread" } return delegate.loadRecord(key, cacheHeaders) } - override fun loadRecords(keys: Collection, cacheHeaders: CacheHeaders): Collection { + override fun loadRecords(keys: Collection, cacheHeaders: CacheHeaders): Collection { check(currentThreadId() != mainThreadId) { "Cache access on main thread" } return delegate.loadRecords(keys, cacheHeaders) } - override fun dump(): Map, Map> { + override fun dump(): Map, Map> { check(currentThreadId() != mainThreadId) { "Cache access on main thread" } diff --git a/tests/normalized-cache/src/commonTest/kotlin/fragmentnormalizer/FragmentNormalizerTest.kt b/tests/normalized-cache/src/commonTest/kotlin/fragmentnormalizer/FragmentNormalizerTest.kt index d464fa15..9f7e9494 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/fragmentnormalizer/FragmentNormalizerTest.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/fragmentnormalizer/FragmentNormalizerTest.kt @@ -6,7 +6,6 @@ import com.apollographql.apollo.testing.internal.runTest import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.IdCacheKeyGenerator import com.apollographql.cache.normalized.apolloStore -import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.internal.normalized import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.normalizedCache @@ -100,6 +99,6 @@ class FragmentNormalizerTest { cacheKeyGenerator = IdCacheKeyGenerator(), ) - assertContains(records.keys, "1.author".hashed()) + assertContains(records.keys, CacheKey("1.author")) } } diff --git a/tests/normalized-cache/src/concurrentTest/kotlin/MemoryCacheOnlyTest.kt b/tests/normalized-cache/src/concurrentTest/kotlin/MemoryCacheOnlyTest.kt index 481bab89..76c03ef6 100644 --- a/tests/normalized-cache/src/concurrentTest/kotlin/MemoryCacheOnlyTest.kt +++ b/tests/normalized-cache/src/concurrentTest/kotlin/MemoryCacheOnlyTest.kt @@ -5,6 +5,7 @@ import com.apollographql.apollo.testing.enqueueTestResponse import com.apollographql.apollo.testing.internal.runTest import com.apollographql.cache.normalized.ApolloStore import com.apollographql.cache.normalized.FetchPolicy +import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.fetchPolicy import com.apollographql.cache.normalized.memory.MemoryCache @@ -27,7 +28,7 @@ class MemoryCacheOnlyTest { val query = GetUserQuery() apolloClient.enqueueTestResponse(query, GetUserQuery.Data(GetUserQuery.User("John", "a@a.com"))) apolloClient.query(query).memoryCacheOnly(true).execute() - val dump: Map, Map> = store.dump() + val dump: Map, Map> = store.dump() assertEquals(2, dump[MemoryCache::class]!!.size) assertEquals(0, dump[SqlNormalizedCache::class]!!.size) } diff --git a/tests/normalized-cache/src/jvmTest/kotlin/CacheMissLoggingInterceptorTest.kt b/tests/normalized-cache/src/jvmTest/kotlin/CacheMissLoggingInterceptorTest.kt index 8adfcfe5..2455b189 100644 --- a/tests/normalized-cache/src/jvmTest/kotlin/CacheMissLoggingInterceptorTest.kt +++ b/tests/normalized-cache/src/jvmTest/kotlin/CacheMissLoggingInterceptorTest.kt @@ -3,8 +3,8 @@ package test import com.apollographql.apollo.ApolloClient import com.apollographql.apollo.testing.internal.runTest import 
com.apollographql.cache.normalized.FetchPolicy +import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.fetchPolicy -import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.logCacheMisses import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.normalizedCache @@ -55,7 +55,7 @@ class CacheMissLoggingInterceptorTest { assertEquals( listOf( "Object 'QUERY_ROOT' has no field named 'hero'", - "Object '${"hero".hashed()}' has no field named 'appearsIn'" + "Object '${CacheKey("hero").key}' has no field named 'appearsIn'" ), recordedLogs ) diff --git a/tests/normalized-cache/src/jvmTest/kotlin/WriteToCacheAsynchronouslyTest.kt b/tests/normalized-cache/src/jvmTest/kotlin/WriteToCacheAsynchronouslyTest.kt index 0562426f..755545fb 100644 --- a/tests/normalized-cache/src/jvmTest/kotlin/WriteToCacheAsynchronouslyTest.kt +++ b/tests/normalized-cache/src/jvmTest/kotlin/WriteToCacheAsynchronouslyTest.kt @@ -4,6 +4,7 @@ import com.apollographql.apollo.ApolloClient import com.apollographql.apollo.testing.internal.runTest import com.apollographql.cache.normalized.ApolloStore import com.apollographql.cache.normalized.api.CacheHeaders +import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.store import com.apollographql.cache.normalized.writeToCacheAsynchronously @@ -81,6 +82,6 @@ class WriteToCacheAsynchronouslyTest { } companion object { - const val QUERY_ROOT_KEY = "QUERY_ROOT" + val QUERY_ROOT_KEY = CacheKey.rootKey() } } diff --git a/tests/partial-results/src/commonTest/kotlin/test/CachePartialResultTest.kt b/tests/partial-results/src/commonTest/kotlin/test/CachePartialResultTest.kt index 2b48c423..c6a25e1c 100644 --- a/tests/partial-results/src/commonTest/kotlin/test/CachePartialResultTest.kt +++ b/tests/partial-results/src/commonTest/kotlin/test/CachePartialResultTest.kt @@ -23,7 +23,6 @@ import com.apollographql.cache.normalized.apolloStore import com.apollographql.cache.normalized.fetchFromCache import com.apollographql.cache.normalized.fetchPolicy import com.apollographql.cache.normalized.fetchPolicyInterceptor -import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.normalizedCache import com.apollographql.cache.normalized.store @@ -125,7 +124,8 @@ class CachePartialResultTest { ) assertErrorsEquals( listOf( - Error.Builder("Object '${"User:1".hashed()}' has no field named 'nickName' in the cache").path(listOf("me", "nickName")) + Error.Builder("Object '${CacheKey("User:1").key}' has no field named 'nickName' in the cache") + .path(listOf("me", "nickName")) .build() ), cacheMissResult.errors @@ -308,7 +308,7 @@ class CachePartialResultTest { ) // Remove project lead from the cache - apolloClient.apolloStore.remove(CacheKey("User:3".hashed())) + apolloClient.apolloStore.remove(CacheKey("User:3")) val cacheResult = apolloClient.query(MeWithBestFriendQuery()) .fetchPolicyInterceptor(PartialCacheOnlyInterceptor) .execute() @@ -344,13 +344,14 @@ class CachePartialResultTest { ) assertErrorsEquals( listOf( - Error.Builder("Object '${"User:3".hashed()}' not found in the cache").path(listOf("me", "projects", 0, "lead")).build() + Error.Builder("Object '${CacheKey("User:3").key}' not found in the cache").path(listOf("me", "projects", 0, "lead")) + 
.build() ), cacheResult.errors ) // Remove best friend from the cache - apolloClient.apolloStore.remove(CacheKey("User:2".hashed())) + apolloClient.apolloStore.remove(CacheKey("User:2")) val cacheResult2 = apolloClient.query(MeWithBestFriendQuery()) .fetchPolicyInterceptor(PartialCacheOnlyInterceptor) .execute() @@ -381,14 +382,15 @@ class CachePartialResultTest { ) assertErrorsEquals( listOf( - Error.Builder("Object '${"User:2".hashed()}' not found in the cache").path(listOf("me", "bestFriend")).build(), - Error.Builder("Object '${"User:3".hashed()}' not found in the cache").path(listOf("me", "projects", 0, "lead")).build(), + Error.Builder("Object '${CacheKey("User:2").key}' not found in the cache").path(listOf("me", "bestFriend")).build(), + Error.Builder("Object '${CacheKey("User:3").key}' not found in the cache").path(listOf("me", "projects", 0, "lead")) + .build(), ), cacheResult2.errors ) // Remove project user from the cache - apolloClient.apolloStore.remove(CacheKey("User:4".hashed())) + apolloClient.apolloStore.remove(CacheKey("User:4")) val cacheResult3 = apolloClient.query(MeWithBestFriendQuery()) .fetchPolicyInterceptor(PartialCacheOnlyInterceptor) .execute() @@ -396,9 +398,10 @@ class CachePartialResultTest { assertNull(cacheResult3.data) assertErrorsEquals( listOf( - Error.Builder("Object '${"User:2".hashed()}' not found in the cache").path(listOf("me", "bestFriend")).build(), - Error.Builder("Object '${"User:3".hashed()}' not found in the cache").path(listOf("me", "projects", 0, "lead")).build(), - Error.Builder("Object '${"User:4".hashed()}' not found in the cache").path(listOf("me", "projects", 0, "users", 0)) + Error.Builder("Object '${CacheKey("User:2").key}' not found in the cache").path(listOf("me", "bestFriend")).build(), + Error.Builder("Object '${CacheKey("User:3").key}' not found in the cache").path(listOf("me", "projects", 0, "lead")) + .build(), + Error.Builder("Object '${CacheKey("User:4").key}' not found in the cache").path(listOf("me", "projects", 0, "users", 0)) .build() ), cacheResult3.errors @@ -532,8 +535,8 @@ class CachePartialResultTest { // Remove the category from the cache apolloClient.apolloStore.accessCache { cache -> - val record = cache.loadRecord("User:1".hashed(), CacheHeaders.NONE)!! - cache.remove(CacheKey("User:1".hashed()), false) + val record = cache.loadRecord(CacheKey("User:1"), CacheHeaders.NONE)!! 
+ cache.remove(CacheKey("User:1"), false) cache.merge(Record(record.key, record.fields - "category"), CacheHeaders.NONE, DefaultRecordMerger) } val cacheMissResult = apolloClient.query(UserByCategoryQuery(Category(2, "Second"))) @@ -543,7 +546,8 @@ class CachePartialResultTest { assertNull(cacheMissResult.data) assertErrorsEquals( listOf( - Error.Builder("Object '${"User:1".hashed()}' has no field named 'category' in the cache").path(listOf("user", "category")) + Error.Builder("Object '${CacheKey("User:1").key}' has no field named 'category' in the cache") + .path(listOf("user", "category")) .build() ), cacheMissResult.errors @@ -634,7 +638,7 @@ class CachePartialResultTest { ) // Remove lead from the cache - apolloClient.apolloStore.remove(CacheKey("User:2".hashed())) + apolloClient.apolloStore.remove(CacheKey("User:2")) val cacheMissResult = apolloClient.query(WithFragmentsQuery()) .fetchPolicyInterceptor(PartialCacheOnlyInterceptor) @@ -671,7 +675,8 @@ class CachePartialResultTest { ) assertErrorsEquals( listOf( - Error.Builder("Object '${"User:2".hashed()}' not found in the cache").path(listOf("me", "mainProject", "lead0")).build() + Error.Builder("Object '${CacheKey("User:2").key}' not found in the cache").path(listOf("me", "mainProject", "lead0")) + .build() ), cacheMissResult.errors ) @@ -735,7 +740,8 @@ class CachePartialResultTest { ) assertErrorsEquals( listOf( - Error.Builder("Field 'nickName' on object '${"User:1".hashed()}' is stale in the cache").path(listOf("me", "nickName")) + Error.Builder("Field 'nickName' on object '${CacheKey("User:1").key}' is stale in the cache") + .path(listOf("me", "nickName")) .build() ), cacheMissResult.errors @@ -800,7 +806,7 @@ class CachePartialResultTest { ) assertErrorsEquals( listOf( - Error.Builder("Field 'salary' on object '${"${"User:1".hashed()}.employeeInfo".hashed()}' is stale in the cache") + Error.Builder("Field 'salary' on object '${CacheKey("${CacheKey("User:1").key}.employeeInfo").key}' is stale in the cache") .path(listOf("me", "employeeInfo", "salary")).build() ), cacheMissResult.errors diff --git a/tests/store-errors/src/commonTest/kotlin/test/StoreErrorsTest.kt b/tests/store-errors/src/commonTest/kotlin/test/StoreErrorsTest.kt index 3325e906..ae10fcb8 100644 --- a/tests/store-errors/src/commonTest/kotlin/test/StoreErrorsTest.kt +++ b/tests/store-errors/src/commonTest/kotlin/test/StoreErrorsTest.kt @@ -12,13 +12,13 @@ import com.apollographql.apollo.interceptor.ApolloInterceptorChain import com.apollographql.apollo.testing.internal.runTest import com.apollographql.cache.normalized.ApolloStore import com.apollographql.cache.normalized.FetchPolicy +import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.api.withErrors import com.apollographql.cache.normalized.errorsReplaceCachedValues import com.apollographql.cache.normalized.fetchFromCache import com.apollographql.cache.normalized.fetchPolicy import com.apollographql.cache.normalized.fetchPolicyInterceptor -import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.sql.SqlNormalizedCacheFactory import com.apollographql.cache.normalized.store @@ -707,17 +707,17 @@ class StoreErrorsTest { query, listOf(Error.Builder("'nickName' can't be reached").path(listOf("me", "nickName")).build()), ) - val normalized: Map = memoryStore.normalize( + val normalized: Map = memoryStore.normalize( 
executable = query, dataWithErrors = dataWithErrors, customScalarAdapters = CustomScalarAdapters.Empty, ) - assertEquals("User", normalized["User:1".hashed()]!!["__typename"]) - assertEquals("1", normalized["User:1".hashed()]!!["id"]) - assertEquals("John", normalized["User:1".hashed()]!!["firstName"]) - assertEquals("Smith", normalized["User:1".hashed()]!!["lastName"]) + assertEquals("User", normalized[CacheKey("User:1")]!!["__typename"]) + assertEquals("1", normalized[CacheKey("User:1")]!!["id"]) + assertEquals("John", normalized[CacheKey("User:1")]!!["firstName"]) + assertEquals("Smith", normalized[CacheKey("User:1")]!!["lastName"]) assertErrorsEquals(Error.Builder("'nickName' can't be reached").path(listOf("me", "nickName")) - .build(), normalized["User:1".hashed()]!!["nickName"] as Error + .build(), normalized[CacheKey("User:1")]!!["nickName"] as Error ) } From 515a4639fe23274144ad4a5b9ab8f9233d1b1ee4 Mon Sep 17 00:00:00 2001 From: BoD Date: Wed, 12 Mar 2025 18:17:02 +0100 Subject: [PATCH 03/29] Avoid using .hashed() --- .../api/normalized-cache-incubating.api | 4 ++ .../api/normalized-cache-incubating.klib.api | 1 + .../cache/normalized/api/CacheKey.kt | 6 ++ .../cache/normalized/api/Record.kt | 4 -- .../cache/normalized/internal/Normalizer.kt | 2 +- .../kotlin/DanglingReferencesTest.kt | 14 ++--- .../commonTest/kotlin/GarbageCollectTest.kt | 22 +++---- .../src/commonTest/kotlin/StaleFieldsTest.kt | 62 +++++++++---------- .../src/commonTest/kotlin/NormalizerTest.kt | 26 ++++---- 9 files changed, 74 insertions(+), 67 deletions(-) diff --git a/normalized-cache-incubating/api/normalized-cache-incubating.api b/normalized-cache-incubating/api/normalized-cache-incubating.api index 13c30061..40a86ad5 100644 --- a/normalized-cache-incubating/api/normalized-cache-incubating.api +++ b/normalized-cache-incubating/api/normalized-cache-incubating.api @@ -263,6 +263,10 @@ public final class com/apollographql/cache/normalized/api/CacheKeyGeneratorConte public final fun getVariables ()Lcom/apollographql/apollo/api/Executable$Variables; } +public final class com/apollographql/cache/normalized/api/CacheKeyKt { + public static final fun fieldKey-eNSUWrY (Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String; +} + public abstract class com/apollographql/cache/normalized/api/CacheKeyResolver : com/apollographql/cache/normalized/api/CacheResolver { public fun ()V public abstract fun cacheKeyForField-fLoEQYY (Lcom/apollographql/cache/normalized/api/ResolverContext;)Ljava/lang/String; diff --git a/normalized-cache-incubating/api/normalized-cache-incubating.klib.api b/normalized-cache-incubating/api/normalized-cache-incubating.klib.api index 519b861e..13723295 100644 --- a/normalized-cache-incubating/api/normalized-cache-incubating.klib.api +++ b/normalized-cache-incubating/api/normalized-cache-incubating.klib.api @@ -556,6 +556,7 @@ final val com.apollographql.cache.normalized/isFromCache // com.apollographql.ca final fun (com.apollographql.apollo/ApolloClient.Builder).com.apollographql.cache.normalized/logCacheMisses(kotlin/Function1 = ...): com.apollographql.apollo/ApolloClient.Builder // com.apollographql.cache.normalized/logCacheMisses|logCacheMisses@com.apollographql.apollo.ApolloClient.Builder(kotlin.Function1){}[0] final fun (com.apollographql.apollo/ApolloClient.Builder).com.apollographql.cache.normalized/normalizedCache(com.apollographql.cache.normalized.api/NormalizedCacheFactory, com.apollographql.cache.normalized.api/CacheKeyGenerator = ..., 
com.apollographql.cache.normalized.api/MetadataGenerator = ..., com.apollographql.cache.normalized.api/CacheResolver = ..., com.apollographql.cache.normalized.api/RecordMerger = ..., com.apollographql.cache.normalized.api/FieldKeyGenerator = ..., com.apollographql.cache.normalized.api/EmbeddedFieldsProvider = ..., kotlin/Boolean = ...): com.apollographql.apollo/ApolloClient.Builder // com.apollographql.cache.normalized/normalizedCache|normalizedCache@com.apollographql.apollo.ApolloClient.Builder(com.apollographql.cache.normalized.api.NormalizedCacheFactory;com.apollographql.cache.normalized.api.CacheKeyGenerator;com.apollographql.cache.normalized.api.MetadataGenerator;com.apollographql.cache.normalized.api.CacheResolver;com.apollographql.cache.normalized.api.RecordMerger;com.apollographql.cache.normalized.api.FieldKeyGenerator;com.apollographql.cache.normalized.api.EmbeddedFieldsProvider;kotlin.Boolean){}[0] final fun (com.apollographql.apollo/ApolloClient.Builder).com.apollographql.cache.normalized/store(com.apollographql.cache.normalized/ApolloStore, kotlin/Boolean = ...): com.apollographql.apollo/ApolloClient.Builder // com.apollographql.cache.normalized/store|store@com.apollographql.apollo.ApolloClient.Builder(com.apollographql.cache.normalized.ApolloStore;kotlin.Boolean){}[0] +final fun (com.apollographql.cache.normalized.api/CacheKey).com.apollographql.cache.normalized.api/fieldKey(kotlin/String): kotlin/String // com.apollographql.cache.normalized.api/fieldKey|fieldKey@com.apollographql.cache.normalized.api.CacheKey(kotlin.String){}[0] final fun (com.apollographql.cache.normalized.api/NormalizedCache).com.apollographql.cache.normalized/allRecords(): kotlin.collections/Map // com.apollographql.cache.normalized/allRecords|allRecords@com.apollographql.cache.normalized.api.NormalizedCache(){}[0] final fun (com.apollographql.cache.normalized.api/NormalizedCache).com.apollographql.cache.normalized/garbageCollect(com.apollographql.cache.normalized.api/MaxAgeProvider, kotlin.time/Duration = ...): com.apollographql.cache.normalized/GarbageCollectResult // com.apollographql.cache.normalized/garbageCollect|garbageCollect@com.apollographql.cache.normalized.api.NormalizedCache(com.apollographql.cache.normalized.api.MaxAgeProvider;kotlin.time.Duration){}[0] final fun (com.apollographql.cache.normalized.api/NormalizedCache).com.apollographql.cache.normalized/removeDanglingReferences(): com.apollographql.cache.normalized/RemovedFieldsAndRecords // com.apollographql.cache.normalized/removeDanglingReferences|removeDanglingReferences@com.apollographql.cache.normalized.api.NormalizedCache(){}[0] diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt index 0f596aa2..75f57949 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt @@ -1,5 +1,6 @@ package com.apollographql.cache.normalized.api +import com.apollographql.apollo.annotations.ApolloInternal import com.apollographql.cache.normalized.internal.hashed import kotlin.jvm.JvmInline import kotlin.jvm.JvmStatic @@ -86,3 +87,8 @@ value class CacheKey private constructor( } } } + +@ApolloInternal +fun CacheKey.fieldKey(fieldName: String): String { + return "$key.$fieldName" +} diff --git 
a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/Record.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/Record.kt index 4618e354..a6ee7c94 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/Record.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/Record.kt @@ -120,7 +120,3 @@ fun Collection?.dependentKeys(): Set { it.fieldKeys() }?.toSet() ?: emptySet() } - -internal fun CacheKey.fieldKey(fieldName: String): String { - return "$key.$fieldName" -} diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/Normalizer.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/Normalizer.kt index f446cd5b..c1649544 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/Normalizer.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/Normalizer.kt @@ -210,7 +210,7 @@ internal class Normalizer( )?.key if (key == null) { - key = path.hashed() + key = CacheKey(path).key } if (embeddedFields.contains(field.name)) { buildFields(value, key, field.selections, field.type.rawType()) diff --git a/tests/garbage-collection/src/commonTest/kotlin/DanglingReferencesTest.kt b/tests/garbage-collection/src/commonTest/kotlin/DanglingReferencesTest.kt index bc31dca3..238d5bf1 100644 --- a/tests/garbage-collection/src/commonTest/kotlin/DanglingReferencesTest.kt +++ b/tests/garbage-collection/src/commonTest/kotlin/DanglingReferencesTest.kt @@ -6,8 +6,8 @@ import com.apollographql.cache.normalized.ApolloStore import com.apollographql.cache.normalized.FetchPolicy import com.apollographql.cache.normalized.allRecords import com.apollographql.cache.normalized.api.CacheKey +import com.apollographql.cache.normalized.api.fieldKey import com.apollographql.cache.normalized.fetchPolicy -import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.removeDanglingReferences import com.apollographql.cache.normalized.sql.SqlNormalizedCacheFactory @@ -42,7 +42,7 @@ class DanglingReferencesTest { store.remove(CacheKey("User:1"), cascade = false) val removedFieldsAndRecords = store.removeDanglingReferences() assertEquals( - setOf("${"Repository:0".hashed()}.starGazers"), + setOf(CacheKey("Repository:0").fieldKey("starGazers")), removedFieldsAndRecords.removedFields ) assertEquals( @@ -79,15 +79,15 @@ class DanglingReferencesTest { val removedFieldsAndRecords = store.removeDanglingReferences() assertEquals( setOf( - ("metaProjects.0.0".hashed() + ".type").hashed() + ".owners", - "metaProjects.0.0".hashed() + ".type", - "QUERY_ROOT.metaProjects", + CacheKey(CacheKey("metaProjects.0.0").fieldKey("type")).fieldKey("owners"), + CacheKey("metaProjects.0.0").fieldKey("type"), + CacheKey("QUERY_ROOT").fieldKey("metaProjects"), ), removedFieldsAndRecords.removedFields ) assertEquals( setOf( - CacheKey(("metaProjects.0.0".hashed() + ".type")), + CacheKey(CacheKey("metaProjects.0.0").fieldKey("type")), CacheKey("metaProjects.0.0"), CacheKey("QUERY_ROOT"), ), @@ -96,7 +96,7 @@ class DanglingReferencesTest { val allRecords = store.accessCache { it.allRecords() } assertFalse(allRecords.containsKey(CacheKey("QUERY_ROOT"))) 
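// Illustrative sketch (not from the patch itself): the CacheKey.fieldKey helper added
// above composes the "<record key>.<field name>" strings that these tests previously
// assembled with .hashed() by hand. Assuming the CacheKey(String) constructor hashes
// its argument, as introduced in PATCH 01:
//
//   import com.apollographql.cache.normalized.api.CacheKey
//   import com.apollographql.cache.normalized.api.fieldKey
//
//   // Same value as CacheKey("metaProjects.0.0").key + ".type"
//   val typeFieldKey: String = CacheKey("metaProjects.0.0").fieldKey("type")
//
//   // Wrapping it again gives the CacheKey of the nested record,
//   // e.g. CacheKey(CacheKey("metaProjects.0.0").fieldKey("type"))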
assertFalse(allRecords.containsKey(CacheKey("metaProjects.0.0"))) - assertFalse(allRecords.containsKey(CacheKey("metaProjects.0.0".hashed() + ".type"))) + assertFalse(allRecords.containsKey(CacheKey(CacheKey("metaProjects.0.0").fieldKey("type")))) } } diff --git a/tests/garbage-collection/src/commonTest/kotlin/GarbageCollectTest.kt b/tests/garbage-collection/src/commonTest/kotlin/GarbageCollectTest.kt index d458767b..2eaa418c 100644 --- a/tests/garbage-collection/src/commonTest/kotlin/GarbageCollectTest.kt +++ b/tests/garbage-collection/src/commonTest/kotlin/GarbageCollectTest.kt @@ -7,10 +7,10 @@ import com.apollographql.cache.normalized.FetchPolicy import com.apollographql.cache.normalized.allRecords import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.SchemaCoordinatesMaxAgeProvider +import com.apollographql.cache.normalized.api.fieldKey import com.apollographql.cache.normalized.cacheHeaders import com.apollographql.cache.normalized.fetchPolicy import com.apollographql.cache.normalized.garbageCollect -import com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.sql.SqlNormalizedCacheFactory import com.apollographql.cache.normalized.store @@ -52,27 +52,27 @@ class GarbageCollectTest { val garbageCollectResult = store.garbageCollect(maxAgeProvider) assertEquals( setOf( - ("metaProjects.0.0".hashed() + ".type").hashed() + ".owners", - ("metaProjects.0.1".hashed() + ".type").hashed() + ".owners", - ("metaProjects.1.0".hashed() + ".type").hashed() + ".owners", + CacheKey(CacheKey("metaProjects.0.0").fieldKey("type")).fieldKey("owners"), + CacheKey(CacheKey("metaProjects.0.1").fieldKey("type")).fieldKey("owners"), + CacheKey(CacheKey("metaProjects.1.0").fieldKey("type")).fieldKey("owners"), ), garbageCollectResult.removedStaleFields.removedFields ) assertEquals( setOf( - CacheKey("metaProjects.0.0".hashed() + ".type"), - CacheKey("metaProjects.0.1".hashed() + ".type"), - CacheKey("metaProjects.1.0".hashed() + ".type"), + CacheKey(CacheKey("metaProjects.0.0").fieldKey("type")), + CacheKey(CacheKey("metaProjects.0.1").fieldKey("type")), + CacheKey(CacheKey("metaProjects.1.0").fieldKey("type")), ), garbageCollectResult.removedStaleFields.removedRecords ) assertEquals( setOf( - "metaProjects.0.0".hashed() + ".type", - "metaProjects.0.1".hashed() + ".type", - "metaProjects.1.0".hashed() + ".type", - "QUERY_ROOT.metaProjects", + CacheKey("metaProjects.0.0").fieldKey("type"), + CacheKey("metaProjects.0.1").fieldKey("type"), + CacheKey("metaProjects.1.0").fieldKey("type"), + CacheKey("QUERY_ROOT").fieldKey("metaProjects"), ), garbageCollectResult.removedDanglingReferences.removedFields ) diff --git a/tests/garbage-collection/src/commonTest/kotlin/StaleFieldsTest.kt b/tests/garbage-collection/src/commonTest/kotlin/StaleFieldsTest.kt index 3213155d..50b34a5e 100644 --- a/tests/garbage-collection/src/commonTest/kotlin/StaleFieldsTest.kt +++ b/tests/garbage-collection/src/commonTest/kotlin/StaleFieldsTest.kt @@ -11,9 +11,9 @@ import com.apollographql.cache.normalized.api.CacheHeaders import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.GlobalMaxAgeProvider import com.apollographql.cache.normalized.api.SchemaCoordinatesMaxAgeProvider +import com.apollographql.cache.normalized.api.fieldKey import com.apollographql.cache.normalized.cacheHeaders import com.apollographql.cache.normalized.fetchPolicy -import 
com.apollographql.cache.normalized.internal.hashed import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.removeStaleFields import com.apollographql.cache.normalized.sql.SqlNormalizedCacheFactory @@ -59,8 +59,8 @@ class StaleFieldsTest { // Repository.stars has a max age of 60 seconds, so they should be removed / User has a max age of 90 seconds, so Repository.starGazers should be kept assertEquals( setOf( - "Repository:0".hashed() + ".stars", - "Repository:1".hashed() + ".stars", + CacheKey("Repository:0").fieldKey("stars"), + CacheKey("Repository:1").fieldKey("stars"), ), removedFieldsAndRecords.removedFields ) assertEquals( @@ -81,10 +81,10 @@ class StaleFieldsTest { // Repository.stars and Repository.starGazers should be removed assertEquals( setOf( - "Repository:0".hashed() + ".stars", - "Repository:0".hashed() + ".starGazers", - "Repository:1".hashed() + ".stars", - "Repository:1".hashed() + ".starGazers", + CacheKey("Repository:0").fieldKey("stars"), + CacheKey("Repository:0").fieldKey("starGazers"), + CacheKey("Repository:1").fieldKey("stars"), + CacheKey("Repository:1").fieldKey("starGazers"), ), removedFieldsAndRecords.removedFields ) assertEquals( @@ -127,8 +127,8 @@ class StaleFieldsTest { // Project.velocity has a max age of 60 seconds, so they should be removed / Project.isUrgent has a max age of 90 seconds, so they should be kept assertEquals( setOf( - "projects.0".hashed() + ".velocity", - "projects.1".hashed() + ".velocity", + CacheKey("projects.0").fieldKey("velocity"), + CacheKey("projects.1").fieldKey("velocity"), ), removedFieldsAndRecords.removedFields ) assertEquals( @@ -149,10 +149,10 @@ class StaleFieldsTest { // Project.velocity and Project.isUrgent should be removed, their records being empty they should be removed assertEquals( setOf( - "projects.0".hashed() + ".velocity", - "projects.0".hashed() + ".isUrgent", - "projects.1".hashed() + ".velocity", - "projects.1".hashed() + ".isUrgent", + CacheKey("projects.0").fieldKey("velocity"), + CacheKey("projects.0").fieldKey("isUrgent"), + CacheKey("projects.1").fieldKey("velocity"), + CacheKey("projects.1").fieldKey("isUrgent"), ), removedFieldsAndRecords.removedFields ) assertEquals( @@ -192,24 +192,24 @@ class StaleFieldsTest { // Everything is stale assertEquals( setOf( - "Repository:0".hashed() + ".__typename", - "Repository:0".hashed() + ".id", - "Repository:0".hashed() + ".stars", - "Repository:0".hashed() + ".starGazers", - "User:0".hashed() + ".__typename", - "User:0".hashed() + ".id", - "User:0".hashed() + ".name", - "Repository:1".hashed() + ".__typename", - "Repository:1".hashed() + ".id", - "Repository:1".hashed() + ".stars", - "Repository:1".hashed() + ".starGazers", - "User:2".hashed() + ".__typename", - "User:2".hashed() + ".id", - "User:2".hashed() + ".name", - "QUERY_ROOT.repositories({\"first\":15})", - "User:1".hashed() + ".__typename", - "User:1".hashed() + ".id", - "User:1".hashed() + ".name" + CacheKey("Repository:0").fieldKey("__typename"), + CacheKey("Repository:0").fieldKey("id"), + CacheKey("Repository:0").fieldKey("stars"), + CacheKey("Repository:0").fieldKey("starGazers"), + CacheKey("User:0").fieldKey("__typename"), + CacheKey("User:0").fieldKey("id"), + CacheKey("User:0").fieldKey("name"), + CacheKey("Repository:1").fieldKey("__typename"), + CacheKey("Repository:1").fieldKey("id"), + CacheKey("Repository:1").fieldKey("stars"), + CacheKey("Repository:1").fieldKey("starGazers"), + CacheKey("User:2").fieldKey("__typename"), + 
CacheKey("User:2").fieldKey("id"), + CacheKey("User:2").fieldKey("name"), + CacheKey("QUERY_ROOT").fieldKey("repositories({\"first\":15})"), + CacheKey("User:1").fieldKey("__typename"), + CacheKey("User:1").fieldKey("id"), + CacheKey("User:1").fieldKey("name"), ), removedFieldsAndRecords.removedFields ) assertEquals( diff --git a/tests/normalized-cache/src/commonTest/kotlin/NormalizerTest.kt b/tests/normalized-cache/src/commonTest/kotlin/NormalizerTest.kt index d78f7e29..5b0ef87b 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/NormalizerTest.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/NormalizerTest.kt @@ -8,7 +8,7 @@ import com.apollographql.cache.normalized.api.DefaultRecordMerger import com.apollographql.cache.normalized.api.IdCacheKeyGenerator import com.apollographql.cache.normalized.api.NormalizedCache import com.apollographql.cache.normalized.api.Record -import com.apollographql.cache.normalized.internal.hashed +import com.apollographql.cache.normalized.api.fieldKey import com.apollographql.cache.normalized.internal.normalized import com.apollographql.cache.normalized.memory.MemoryCacheFactory import httpcache.AllPlanetsQuery @@ -106,13 +106,13 @@ class NormalizerTest { assertEquals(heroRecord!!["name"], "R2-D2") assertEquals( listOf( - CacheKey("${TEST_FIELD_KEY_JEDI.hashed()}.friends.0"), - CacheKey("${TEST_FIELD_KEY_JEDI.hashed()}.friends.1"), - CacheKey("${TEST_FIELD_KEY_JEDI.hashed()}.friends.2") + CacheKey(CacheKey(TEST_FIELD_KEY_JEDI).fieldKey("friends.0")), + CacheKey(CacheKey(TEST_FIELD_KEY_JEDI).fieldKey("friends.1")), + CacheKey(CacheKey(TEST_FIELD_KEY_JEDI).fieldKey("friends.2")), ), heroRecord["friends"] ) - val luke = records.get(CacheKey("${TEST_FIELD_KEY_JEDI.hashed()}.friends.0")) + val luke = records.get(CacheKey(CacheKey(TEST_FIELD_KEY_JEDI).fieldKey("friends.0"))) assertEquals(luke!!["name"], "Luke Skywalker") } @@ -207,7 +207,7 @@ class NormalizerTest { @Throws(Exception::class) fun testHeroParentTypeDependentFieldDroid() { val records = records(HeroParentTypeDependentFieldQuery(Episode.JEDI), "HeroParentTypeDependentFieldDroidResponse.json") - val lukeRecord = records.get(CacheKey(TEST_FIELD_KEY_JEDI.hashed() + ".friends.0")) + val lukeRecord = records.get(CacheKey(CacheKey(TEST_FIELD_KEY_JEDI).fieldKey("friends.0"))) assertEquals(lukeRecord!!["name"], "Luke Skywalker") assertEquals(lukeRecord["height({\"unit\":\"METER\"})"], 1.72) @@ -215,21 +215,21 @@ class NormalizerTest { val friends = records[CacheKey(TEST_FIELD_KEY_JEDI)]!!["friends"] assertIs>(friends) - assertEquals(friends[0], CacheKey((TEST_FIELD_KEY_JEDI.hashed() + ".friends.0"))) - assertEquals(friends[1], CacheKey((TEST_FIELD_KEY_JEDI.hashed() + ".friends.1"))) - assertEquals(friends[2], CacheKey((TEST_FIELD_KEY_JEDI.hashed() + ".friends.2"))) + assertEquals(friends[0], CacheKey(CacheKey(TEST_FIELD_KEY_JEDI).fieldKey("friends.0"))) + assertEquals(friends[1], CacheKey(CacheKey(TEST_FIELD_KEY_JEDI).fieldKey("friends.1"))) + assertEquals(friends[2], CacheKey(CacheKey(TEST_FIELD_KEY_JEDI).fieldKey("friends.2"))) } @Test fun list_of_objects_with_null_object() { val records = records(AllPlanetsQuery(), "AllPlanetsListOfObjectWithNullObject.json") - val fieldKey = "allPlanets({\"first\":300})".hashed() + val fieldKey = CacheKey("allPlanets({\"first\":300})").key var record: Record? 
= records[CacheKey("$fieldKey.planets.0")] assertTrue(record?.get("filmConnection") == null) - record = records.get(CacheKey("${"$fieldKey.planets.0".hashed()}.filmConnection")) + record = records.get(CacheKey(CacheKey("$fieldKey.planets.0").fieldKey("filmConnection"))) assertTrue(record == null) - record = records.get(CacheKey("${"$fieldKey.planets.1".hashed()}.filmConnection")) + record = records.get(CacheKey(CacheKey("$fieldKey.planets.1").fieldKey("filmConnection"))) assertTrue(record != null) } @@ -239,7 +239,7 @@ class NormalizerTest { fun testHeroParentTypeDependentFieldHuman() { val records = records(HeroParentTypeDependentFieldQuery(Episode.EMPIRE), "HeroParentTypeDependentFieldHumanResponse.json") - val lukeRecord = records.get(CacheKey("${TEST_FIELD_KEY_EMPIRE.hashed()}.friends.0")) + val lukeRecord = records.get(CacheKey(CacheKey(TEST_FIELD_KEY_EMPIRE).fieldKey("friends.0"))) assertEquals(lukeRecord!!["name"], "Han Solo") assertEquals(lukeRecord["height({\"unit\":\"FOOT\"})"], 5.905512) } From 46b783be303240b4c6cb86f9dfb1741704844480 Mon Sep 17 00:00:00 2001 From: BoD Date: Thu, 9 Jan 2025 14:39:50 +0100 Subject: [PATCH 04/29] New 'fields' db format. Also, remove EVICT_AFTER_READ. --- .../normalized/api/ApolloCacheHeaders.kt | 4 +- .../cache/normalized/memory/MemoryCache.kt | 12 +- .../cache/normalized/MemoryCacheTest.kt | 17 +- .../normalized-cache-sqlite-incubating.api | 121 ++++---------- .../normalized-cache-sqlite-incubating.api | 121 ++++---------- .../build.gradle.kts | 16 +- .../sqldelight/blob2/schema/1.db | Bin 12288 -> 0 bytes .../sqldelight/{blob => fields}/schema/1.db | Bin 8192 -> 8192 bytes .../sqldelight/{blob => fields}/schema/2.db | Bin 8192 -> 8192 bytes .../normalized/sql/SqlNormalizedCache.kt | 153 ++++++------------ ...izer.kt => ApolloJsonElementSerializer.kt} | 78 +++------ .../sql/internal/Blob2RecordDatabase.kt | 67 -------- .../sql/internal/BlobRecordDatabase.kt | 61 ------- .../normalized/sql/internal/RecordDatabase.kt | 135 ++++++++++++---- .../normalized/sql/internal/factoryHelpers.kt | 8 +- .../normalized/sql/internal/blob/blob.sq | 35 ---- .../sqldelight/blob/migrations/1.sqm | 7 - .../normalized/sql/internal/blob2/blob2.sq | 45 ------ .../normalized/sql/internal/fields/fields.sq | 41 +++++ .../sqldelight/fields/com/migrations/1.sqm | 13 ++ .../normalized/sql/SqlNormalizedCacheTest.kt | 39 +---- .../kotlin/DanglingReferencesTest.kt | 27 +++- .../commonTest/kotlin/GarbageCollectTest.kt | 12 +- .../kotlin/ReachableCacheKeysTest.kt | 25 +-- .../src/commonTest/kotlin/StaleFieldsTest.kt | 39 ++++- .../src/commonTest/kotlin/CacheFlagsTest.kt | 24 --- 26 files changed, 376 insertions(+), 724 deletions(-) delete mode 100644 normalized-cache-sqlite-incubating/sqldelight/blob2/schema/1.db rename normalized-cache-sqlite-incubating/sqldelight/{blob => fields}/schema/1.db (96%) rename normalized-cache-sqlite-incubating/sqldelight/{blob => fields}/schema/2.db (96%) rename normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/{BlobRecordSerializer.kt => ApolloJsonElementSerializer.kt} (68%) delete mode 100644 normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/Blob2RecordDatabase.kt delete mode 100644 normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/BlobRecordDatabase.kt delete mode 100644 
normalized-cache-sqlite-incubating/src/commonMain/sqldelight/blob/com/apollographql/cache/normalized/sql/internal/blob/blob.sq delete mode 100644 normalized-cache-sqlite-incubating/src/commonMain/sqldelight/blob/migrations/1.sqm delete mode 100644 normalized-cache-sqlite-incubating/src/commonMain/sqldelight/blob2/com/apollographql/cache/normalized/sql/internal/blob2/blob2.sq create mode 100644 normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/apollographql/cache/normalized/sql/internal/fields/fields.sq create mode 100644 normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/migrations/1.sqm diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/ApolloCacheHeaders.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/ApolloCacheHeaders.kt index f089fe5d..65c72f16 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/ApolloCacheHeaders.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/ApolloCacheHeaders.kt @@ -14,9 +14,7 @@ object ApolloCacheHeaders { */ const val MEMORY_CACHE_ONLY = "memory-cache-only" - /** - * Records from this request should be evicted after being read. - */ + @Deprecated(level = DeprecationLevel.ERROR, message = "This header has no effect and will be removed in a future release. Use ApolloStore.remove() instead.") const val EVICT_AFTER_READ = "evict-after-read" /** diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/memory/MemoryCache.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/memory/MemoryCache.kt index bf5fd586..f7f20f5e 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/memory/MemoryCache.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/memory/MemoryCache.kt @@ -48,14 +48,14 @@ class MemoryCache( get() = lockRead { lruCache.weight() } override fun loadRecord(key: String, cacheHeaders: CacheHeaders): Record? = lockRead { - val record = internalLoadRecord(key, cacheHeaders) + val record = lruCache[key] record ?: nextCache?.loadRecord(key, cacheHeaders)?.also { nextCachedRecord -> lruCache[key] = nextCachedRecord } } override fun loadRecords(keys: Collection, cacheHeaders: CacheHeaders): Collection = lockRead { - val recordsByKey: Map = keys.associateWith { key -> internalLoadRecord(key, cacheHeaders) } + val recordsByKey: Map = keys.associateWith { key -> lruCache[key] } val missingKeys = recordsByKey.filterValues { it == null }.keys val nextCachedRecords = nextCache?.loadRecords(missingKeys, cacheHeaders).orEmpty() for (record in nextCachedRecords) { @@ -64,14 +64,6 @@ class MemoryCache( recordsByKey.values.filterNotNull() + nextCachedRecords } - private fun internalLoadRecord(key: String, cacheHeaders: CacheHeaders): Record? 
{ - return lruCache[key]?.also { - if (cacheHeaders.hasHeader(ApolloCacheHeaders.EVICT_AFTER_READ)) { - lruCache.remove(key) - } - } - } - override fun clearAll() { lockWrite { lruCache.clear() diff --git a/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/MemoryCacheTest.kt b/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/MemoryCacheTest.kt index d606fd1f..a3d256bb 100644 --- a/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/MemoryCacheTest.kt +++ b/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/MemoryCacheTest.kt @@ -4,9 +4,9 @@ import com.apollographql.cache.normalized.api.ApolloCacheHeaders import com.apollographql.cache.normalized.api.CacheHeaders import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.DefaultRecordMerger -import com.apollographql.cache.normalized.memory.MemoryCache import com.apollographql.cache.normalized.api.NormalizedCache import com.apollographql.cache.normalized.api.Record +import com.apollographql.cache.normalized.memory.MemoryCache import kotlin.test.Test import kotlin.test.assertEquals import kotlin.test.assertNotNull @@ -185,21 +185,6 @@ class MemoryCacheTest { assertEquals(testRecord.fields, primaryCache.loadRecord(testRecord.key, CacheHeaders.NONE)?.fields) } - - // Tests for StandardCacheHeader compliance. - @Test - fun testHeader_evictAfterRead() { - val lruCache = createCache() - val testRecord = createTestRecord("1") - - lruCache.merge(testRecord, CacheHeaders.NONE, DefaultRecordMerger) - - val headers = CacheHeaders.builder().addHeader(ApolloCacheHeaders.EVICT_AFTER_READ, "true").build() - - assertNotNull(lruCache.loadRecord(testRecord.key, headers)) - assertNull(lruCache.loadRecord(testRecord.key, headers)) - } - @Test fun testHeader_noCache() { val lruCache = createCache() diff --git a/normalized-cache-sqlite-incubating/api/android/normalized-cache-sqlite-incubating.api b/normalized-cache-sqlite-incubating/api/android/normalized-cache-sqlite-incubating.api index 131ffa74..cd21ed2e 100644 --- a/normalized-cache-sqlite-incubating/api/android/normalized-cache-sqlite-incubating.api +++ b/normalized-cache-sqlite-incubating/api/android/normalized-cache-sqlite-incubating.api @@ -38,113 +38,48 @@ public final class com/apollographql/cache/normalized/sql/VersionKt { public static final field VERSION Ljava/lang/String; } -public abstract interface class com/apollographql/cache/normalized/sql/internal/blob/BlobDatabase : app/cash/sqldelight/Transacter { - public static final field Companion Lcom/apollographql/cache/normalized/sql/internal/blob/BlobDatabase$Companion; - public abstract fun getBlobQueries ()Lcom/apollographql/cache/normalized/sql/internal/blob/BlobQueries; -} - -public final class com/apollographql/cache/normalized/sql/internal/blob/BlobDatabase$Companion { - public final fun getSchema ()Lapp/cash/sqldelight/db/SqlSchema; - public final fun invoke (Lapp/cash/sqldelight/db/SqlDriver;)Lcom/apollographql/cache/normalized/sql/internal/blob/BlobDatabase; -} - -public final class com/apollographql/cache/normalized/sql/internal/blob/BlobQueries : app/cash/sqldelight/TransacterImpl { - public fun (Lapp/cash/sqldelight/db/SqlDriver;)V - public final fun changes ()Lapp/cash/sqldelight/ExecutableQuery; - public final fun delete (Ljava/lang/String;)V - public final fun deleteAll ()V - public final fun deleteRecords (Ljava/util/Collection;)V - public final fun 
deleteRecordsWithKeyMatching (Ljava/lang/String;Ljava/lang/String;)V - public final fun insert (Ljava/lang/String;[B)V - public final fun recordForKey (Ljava/lang/String;)Lapp/cash/sqldelight/Query; - public final fun recordForKey (Ljava/lang/String;Lkotlin/jvm/functions/Function2;)Lapp/cash/sqldelight/Query; - public final fun recordsForKeys (Ljava/util/Collection;)Lapp/cash/sqldelight/Query; - public final fun recordsForKeys (Ljava/util/Collection;Lkotlin/jvm/functions/Function2;)Lapp/cash/sqldelight/Query; - public final fun selectRecords ()Lapp/cash/sqldelight/Query; - public final fun selectRecords (Lkotlin/jvm/functions/Function2;)Lapp/cash/sqldelight/Query; - public final fun update ([BLjava/lang/String;)V -} - -public final class com/apollographql/cache/normalized/sql/internal/blob/Blobs { - public fun (Ljava/lang/String;[B)V +public final class com/apollographql/cache/normalized/sql/internal/fields/Field_ { + public fun (Ljava/lang/String;Ljava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;)V public final fun component1 ()Ljava/lang/String; - public final fun component2 ()[B - public final fun copy (Ljava/lang/String;[B)Lcom/apollographql/cache/normalized/sql/internal/blob/Blobs; - public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/blob/Blobs;Ljava/lang/String;[BILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/blob/Blobs; + public final fun component2 ()Ljava/lang/String; + public final fun component3 ()[B + public final fun component4 ()[B + public final fun component5 ()Ljava/lang/Long; + public final fun component6 ()Ljava/lang/Long; + public final fun copy (Ljava/lang/String;Ljava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;)Lcom/apollographql/cache/normalized/sql/internal/fields/Field_; + public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/fields/Field_;Ljava/lang/String;Ljava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;ILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/fields/Field_; public fun equals (Ljava/lang/Object;)Z - public final fun getBlob ()[B + public final fun getExpiration_date ()Ljava/lang/Long; + public final fun getField_ ()Ljava/lang/String; public final fun getKey ()Ljava/lang/String; + public final fun getMetadata ()[B + public final fun getReceived_date ()Ljava/lang/Long; + public final fun getValue_ ()[B public fun hashCode ()I public fun toString ()Ljava/lang/String; } -public abstract interface class com/apollographql/cache/normalized/sql/internal/blob2/Blob2Database : app/cash/sqldelight/Transacter { - public static final field Companion Lcom/apollographql/cache/normalized/sql/internal/blob2/Blob2Database$Companion; - public abstract fun getBlob2Queries ()Lcom/apollographql/cache/normalized/sql/internal/blob2/Blob2Queries; +public abstract interface class com/apollographql/cache/normalized/sql/internal/fields/FieldsDatabase : app/cash/sqldelight/Transacter { + public static final field Companion Lcom/apollographql/cache/normalized/sql/internal/fields/FieldsDatabase$Companion; + public abstract fun getFieldsQueries ()Lcom/apollographql/cache/normalized/sql/internal/fields/FieldsQueries; } -public final class com/apollographql/cache/normalized/sql/internal/blob2/Blob2Database$Companion { +public final class com/apollographql/cache/normalized/sql/internal/fields/FieldsDatabase$Companion { public final fun getSchema ()Lapp/cash/sqldelight/db/SqlSchema; - public final fun invoke 
(Lapp/cash/sqldelight/db/SqlDriver;)Lcom/apollographql/cache/normalized/sql/internal/blob2/Blob2Database; + public final fun invoke (Lapp/cash/sqldelight/db/SqlDriver;)Lcom/apollographql/cache/normalized/sql/internal/fields/FieldsDatabase; } -public final class com/apollographql/cache/normalized/sql/internal/blob2/Blob2Queries : app/cash/sqldelight/TransacterImpl { +public final class com/apollographql/cache/normalized/sql/internal/fields/FieldsQueries : app/cash/sqldelight/TransacterImpl { public fun (Lapp/cash/sqldelight/db/SqlDriver;)V public final fun changes ()Lapp/cash/sqldelight/ExecutableQuery; - public final fun count ()Lapp/cash/sqldelight/Query; - public final fun delete (Ljava/lang/String;)V - public final fun deleteAll ()V + public final fun deleteAllRecords ()V public final fun deleteRecords (Ljava/util/Collection;)V - public final fun deleteRecordsWithKeyMatching (Ljava/lang/String;Ljava/lang/String;)V - public final fun insert (Ljava/lang/String;[BLjava/lang/Long;)V - public final fun recordForKey (Ljava/lang/String;)Lapp/cash/sqldelight/Query; - public final fun recordForKey (Ljava/lang/String;Lkotlin/jvm/functions/Function2;)Lapp/cash/sqldelight/Query; - public final fun recordsForKeys (Ljava/util/Collection;)Lapp/cash/sqldelight/Query; - public final fun recordsForKeys (Ljava/util/Collection;Lkotlin/jvm/functions/Function2;)Lapp/cash/sqldelight/Query; - public final fun selectRecords ()Lapp/cash/sqldelight/Query; - public final fun selectRecords (Lkotlin/jvm/functions/Function3;)Lapp/cash/sqldelight/Query; - public final fun trim (J)V - public final fun update ([BLjava/lang/Long;Ljava/lang/String;)V -} - -public final class com/apollographql/cache/normalized/sql/internal/blob2/Blobs { - public fun (Ljava/lang/String;[BLjava/lang/Long;)V - public final fun component1 ()Ljava/lang/String; - public final fun component2 ()[B - public final fun component3 ()Ljava/lang/Long; - public final fun copy (Ljava/lang/String;[BLjava/lang/Long;)Lcom/apollographql/cache/normalized/sql/internal/blob2/Blobs; - public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/blob2/Blobs;Ljava/lang/String;[BLjava/lang/Long;ILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/blob2/Blobs; - public fun equals (Ljava/lang/Object;)Z - public final fun getBlob ()[B - public final fun getDate ()Ljava/lang/Long; - public final fun getKey ()Ljava/lang/String; - public fun hashCode ()I - public fun toString ()Ljava/lang/String; -} - -public final class com/apollographql/cache/normalized/sql/internal/blob2/RecordForKey { - public fun (Ljava/lang/String;[B)V - public final fun component1 ()Ljava/lang/String; - public final fun component2 ()[B - public final fun copy (Ljava/lang/String;[B)Lcom/apollographql/cache/normalized/sql/internal/blob2/RecordForKey; - public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/blob2/RecordForKey;Ljava/lang/String;[BILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/blob2/RecordForKey; - public fun equals (Ljava/lang/Object;)Z - public final fun getBlob ()[B - public final fun getKey ()Ljava/lang/String; - public fun hashCode ()I - public fun toString ()Ljava/lang/String; -} - -public final class com/apollographql/cache/normalized/sql/internal/blob2/RecordsForKeys { - public fun (Ljava/lang/String;[B)V - public final fun component1 ()Ljava/lang/String; - public final fun component2 ()[B - public final fun copy 
(Ljava/lang/String;[B)Lcom/apollographql/cache/normalized/sql/internal/blob2/RecordsForKeys; - public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/blob2/RecordsForKeys;Ljava/lang/String;[BILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/blob2/RecordsForKeys; - public fun equals (Ljava/lang/Object;)Z - public final fun getBlob ()[B - public final fun getKey ()Ljava/lang/String; - public fun hashCode ()I - public fun toString ()Ljava/lang/String; + public final fun deleteRecordsMatching (Ljava/lang/String;)V + public final fun insertOrUpdateField (Ljava/lang/String;Ljava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;)V + public final fun selectAllRecords ()Lapp/cash/sqldelight/Query; + public final fun selectAllRecords (Lkotlin/jvm/functions/Function6;)Lapp/cash/sqldelight/Query; + public final fun selectRecords (Ljava/util/Collection;)Lapp/cash/sqldelight/Query; + public final fun selectRecords (Ljava/util/Collection;Lkotlin/jvm/functions/Function6;)Lapp/cash/sqldelight/Query; + public final fun trimByReceivedDate (J)V } diff --git a/normalized-cache-sqlite-incubating/api/jvm/normalized-cache-sqlite-incubating.api b/normalized-cache-sqlite-incubating/api/jvm/normalized-cache-sqlite-incubating.api index 5ec64ca0..a2269598 100644 --- a/normalized-cache-sqlite-incubating/api/jvm/normalized-cache-sqlite-incubating.api +++ b/normalized-cache-sqlite-incubating/api/jvm/normalized-cache-sqlite-incubating.api @@ -27,113 +27,48 @@ public final class com/apollographql/cache/normalized/sql/VersionKt { public static final field VERSION Ljava/lang/String; } -public abstract interface class com/apollographql/cache/normalized/sql/internal/blob/BlobDatabase : app/cash/sqldelight/Transacter { - public static final field Companion Lcom/apollographql/cache/normalized/sql/internal/blob/BlobDatabase$Companion; - public abstract fun getBlobQueries ()Lcom/apollographql/cache/normalized/sql/internal/blob/BlobQueries; -} - -public final class com/apollographql/cache/normalized/sql/internal/blob/BlobDatabase$Companion { - public final fun getSchema ()Lapp/cash/sqldelight/db/SqlSchema; - public final fun invoke (Lapp/cash/sqldelight/db/SqlDriver;)Lcom/apollographql/cache/normalized/sql/internal/blob/BlobDatabase; -} - -public final class com/apollographql/cache/normalized/sql/internal/blob/BlobQueries : app/cash/sqldelight/TransacterImpl { - public fun (Lapp/cash/sqldelight/db/SqlDriver;)V - public final fun changes ()Lapp/cash/sqldelight/ExecutableQuery; - public final fun delete (Ljava/lang/String;)V - public final fun deleteAll ()V - public final fun deleteRecords (Ljava/util/Collection;)V - public final fun deleteRecordsWithKeyMatching (Ljava/lang/String;Ljava/lang/String;)V - public final fun insert (Ljava/lang/String;[B)V - public final fun recordForKey (Ljava/lang/String;)Lapp/cash/sqldelight/Query; - public final fun recordForKey (Ljava/lang/String;Lkotlin/jvm/functions/Function2;)Lapp/cash/sqldelight/Query; - public final fun recordsForKeys (Ljava/util/Collection;)Lapp/cash/sqldelight/Query; - public final fun recordsForKeys (Ljava/util/Collection;Lkotlin/jvm/functions/Function2;)Lapp/cash/sqldelight/Query; - public final fun selectRecords ()Lapp/cash/sqldelight/Query; - public final fun selectRecords (Lkotlin/jvm/functions/Function2;)Lapp/cash/sqldelight/Query; - public final fun update ([BLjava/lang/String;)V -} - -public final class com/apollographql/cache/normalized/sql/internal/blob/Blobs { - public fun (Ljava/lang/String;[B)V +public final 
class com/apollographql/cache/normalized/sql/internal/fields/Field_ { + public fun (Ljava/lang/String;Ljava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;)V public final fun component1 ()Ljava/lang/String; - public final fun component2 ()[B - public final fun copy (Ljava/lang/String;[B)Lcom/apollographql/cache/normalized/sql/internal/blob/Blobs; - public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/blob/Blobs;Ljava/lang/String;[BILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/blob/Blobs; + public final fun component2 ()Ljava/lang/String; + public final fun component3 ()[B + public final fun component4 ()[B + public final fun component5 ()Ljava/lang/Long; + public final fun component6 ()Ljava/lang/Long; + public final fun copy (Ljava/lang/String;Ljava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;)Lcom/apollographql/cache/normalized/sql/internal/fields/Field_; + public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/fields/Field_;Ljava/lang/String;Ljava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;ILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/fields/Field_; public fun equals (Ljava/lang/Object;)Z - public final fun getBlob ()[B + public final fun getExpiration_date ()Ljava/lang/Long; + public final fun getField_ ()Ljava/lang/String; public final fun getKey ()Ljava/lang/String; + public final fun getMetadata ()[B + public final fun getReceived_date ()Ljava/lang/Long; + public final fun getValue_ ()[B public fun hashCode ()I public fun toString ()Ljava/lang/String; } -public abstract interface class com/apollographql/cache/normalized/sql/internal/blob2/Blob2Database : app/cash/sqldelight/Transacter { - public static final field Companion Lcom/apollographql/cache/normalized/sql/internal/blob2/Blob2Database$Companion; - public abstract fun getBlob2Queries ()Lcom/apollographql/cache/normalized/sql/internal/blob2/Blob2Queries; +public abstract interface class com/apollographql/cache/normalized/sql/internal/fields/FieldsDatabase : app/cash/sqldelight/Transacter { + public static final field Companion Lcom/apollographql/cache/normalized/sql/internal/fields/FieldsDatabase$Companion; + public abstract fun getFieldsQueries ()Lcom/apollographql/cache/normalized/sql/internal/fields/FieldsQueries; } -public final class com/apollographql/cache/normalized/sql/internal/blob2/Blob2Database$Companion { +public final class com/apollographql/cache/normalized/sql/internal/fields/FieldsDatabase$Companion { public final fun getSchema ()Lapp/cash/sqldelight/db/SqlSchema; - public final fun invoke (Lapp/cash/sqldelight/db/SqlDriver;)Lcom/apollographql/cache/normalized/sql/internal/blob2/Blob2Database; + public final fun invoke (Lapp/cash/sqldelight/db/SqlDriver;)Lcom/apollographql/cache/normalized/sql/internal/fields/FieldsDatabase; } -public final class com/apollographql/cache/normalized/sql/internal/blob2/Blob2Queries : app/cash/sqldelight/TransacterImpl { +public final class com/apollographql/cache/normalized/sql/internal/fields/FieldsQueries : app/cash/sqldelight/TransacterImpl { public fun (Lapp/cash/sqldelight/db/SqlDriver;)V public final fun changes ()Lapp/cash/sqldelight/ExecutableQuery; - public final fun count ()Lapp/cash/sqldelight/Query; - public final fun delete (Ljava/lang/String;)V - public final fun deleteAll ()V + public final fun deleteAllRecords ()V public final fun deleteRecords (Ljava/util/Collection;)V - public final fun deleteRecordsWithKeyMatching 
(Ljava/lang/String;Ljava/lang/String;)V - public final fun insert (Ljava/lang/String;[BLjava/lang/Long;)V - public final fun recordForKey (Ljava/lang/String;)Lapp/cash/sqldelight/Query; - public final fun recordForKey (Ljava/lang/String;Lkotlin/jvm/functions/Function2;)Lapp/cash/sqldelight/Query; - public final fun recordsForKeys (Ljava/util/Collection;)Lapp/cash/sqldelight/Query; - public final fun recordsForKeys (Ljava/util/Collection;Lkotlin/jvm/functions/Function2;)Lapp/cash/sqldelight/Query; - public final fun selectRecords ()Lapp/cash/sqldelight/Query; - public final fun selectRecords (Lkotlin/jvm/functions/Function3;)Lapp/cash/sqldelight/Query; - public final fun trim (J)V - public final fun update ([BLjava/lang/Long;Ljava/lang/String;)V -} - -public final class com/apollographql/cache/normalized/sql/internal/blob2/Blobs { - public fun (Ljava/lang/String;[BLjava/lang/Long;)V - public final fun component1 ()Ljava/lang/String; - public final fun component2 ()[B - public final fun component3 ()Ljava/lang/Long; - public final fun copy (Ljava/lang/String;[BLjava/lang/Long;)Lcom/apollographql/cache/normalized/sql/internal/blob2/Blobs; - public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/blob2/Blobs;Ljava/lang/String;[BLjava/lang/Long;ILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/blob2/Blobs; - public fun equals (Ljava/lang/Object;)Z - public final fun getBlob ()[B - public final fun getDate ()Ljava/lang/Long; - public final fun getKey ()Ljava/lang/String; - public fun hashCode ()I - public fun toString ()Ljava/lang/String; -} - -public final class com/apollographql/cache/normalized/sql/internal/blob2/RecordForKey { - public fun (Ljava/lang/String;[B)V - public final fun component1 ()Ljava/lang/String; - public final fun component2 ()[B - public final fun copy (Ljava/lang/String;[B)Lcom/apollographql/cache/normalized/sql/internal/blob2/RecordForKey; - public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/blob2/RecordForKey;Ljava/lang/String;[BILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/blob2/RecordForKey; - public fun equals (Ljava/lang/Object;)Z - public final fun getBlob ()[B - public final fun getKey ()Ljava/lang/String; - public fun hashCode ()I - public fun toString ()Ljava/lang/String; -} - -public final class com/apollographql/cache/normalized/sql/internal/blob2/RecordsForKeys { - public fun (Ljava/lang/String;[B)V - public final fun component1 ()Ljava/lang/String; - public final fun component2 ()[B - public final fun copy (Ljava/lang/String;[B)Lcom/apollographql/cache/normalized/sql/internal/blob2/RecordsForKeys; - public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/blob2/RecordsForKeys;Ljava/lang/String;[BILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/blob2/RecordsForKeys; - public fun equals (Ljava/lang/Object;)Z - public final fun getBlob ()[B - public final fun getKey ()Ljava/lang/String; - public fun hashCode ()I - public fun toString ()Ljava/lang/String; + public final fun deleteRecordsMatching (Ljava/lang/String;)V + public final fun insertOrUpdateField (Ljava/lang/String;Ljava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;)V + public final fun selectAllRecords ()Lapp/cash/sqldelight/Query; + public final fun selectAllRecords (Lkotlin/jvm/functions/Function6;)Lapp/cash/sqldelight/Query; + public final fun selectRecords (Ljava/util/Collection;)Lapp/cash/sqldelight/Query; + public final fun 
selectRecords (Ljava/util/Collection;Lkotlin/jvm/functions/Function6;)Lapp/cash/sqldelight/Query; + public final fun trimByReceivedDate (J)V } diff --git a/normalized-cache-sqlite-incubating/build.gradle.kts b/normalized-cache-sqlite-incubating/build.gradle.kts index 762a70e9..3dfc94f1 100644 --- a/normalized-cache-sqlite-incubating/build.gradle.kts +++ b/normalized-cache-sqlite-incubating/build.gradle.kts @@ -29,17 +29,11 @@ android { testOptions.targetSdk = 30 } - -configure { - databases.create("BlobDatabase") { - packageName.set("com.apollographql.cache.normalized.sql.internal.blob") - schemaOutputDirectory.set(file("sqldelight/blob/schema")) - srcDirs.setFrom("src/commonMain/sqldelight/blob/") - } - databases.create("Blob2Database") { - packageName.set("com.apollographql.cache.normalized.sql.internal.blob2") - schemaOutputDirectory.set(file("sqldelight/blob2/schema")) - srcDirs.setFrom("src/commonMain/sqldelight/blob2/") +sqldelight { + databases.create("FieldsDatabase") { + packageName.set("com.apollographql.cache.normalized.sql.internal.fields") + schemaOutputDirectory.set(file("sqldelight/fields/schema")) + srcDirs.setFrom("src/commonMain/sqldelight/fields/") } } diff --git a/normalized-cache-sqlite-incubating/sqldelight/blob2/schema/1.db b/normalized-cache-sqlite-incubating/sqldelight/blob2/schema/1.db deleted file mode 100644 index fdeea28e30532bc35b189de753b34f3aea5fdd52..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 12288 zcmeI#&r8EF6bJAm6NQ3ox15A`+(5i|9@x5|#oB01*>;kaMkr1Pvxf;@_NVxtnp7)Z zJnUHb23~%>_h|Us`f#7rjismc=A~%poQ)Xg?1G3fCR_|$bi|Q#Sl?69xnz009U<00Izz00bZa0SG{#Zvs(|O|tD>=wb+s*5>+)@I znW@kyiqn}|P?rjHN^N@VZzem=kEE1*S2RVrvTdfF#80;gqeOjo&^RD^w(n%r!ceMR z8v%p?Pesb#5rr^LQ3`^HJa6@m>f( y00Izz00bZa0SG_<0uX=z1ojcY|NlN(sD%IoAOHafKmY;|fB*y_009W}Q{W4~UO=`0 diff --git a/normalized-cache-sqlite-incubating/sqldelight/blob/schema/1.db b/normalized-cache-sqlite-incubating/sqldelight/fields/schema/1.db similarity index 96% rename from normalized-cache-sqlite-incubating/sqldelight/blob/schema/1.db rename to normalized-cache-sqlite-incubating/sqldelight/fields/schema/1.db index 17bc7efe178cf33f29a58d1195823951afc38ba1..d7585ec68ff45e47e5bb2dfa19355fec16eff4c0 100644 GIT binary patch delta 245 zcmZp0XmFU2#UjGMFS4<48DD*4CL_DJxHx08VM$_APHI|aYEBB6U~&#}bqsM;2yt}s zaa8~dDQIvhC}gKrDulR3geds=197O2j}A}}B8?$jmY7qTs^H|~?*vkmn_7~Xl30=m z7br?iPDsrxOHGLf3a2V~`h~c9F!QW58+27C2$J04PA;>kr$I;o9OOtE!XMSD*0CyorTL1t6 delta 107 zcmZp0XmFU2#nQ{b-@92*K%QT_kda+nT%57du_Q4mCp9T2KdBf@FgXXgI)=C^gg83+ zxGI2!COhzpsVW2ndHOmAMJjl^M(S`WfMpb%eEgjh{QN@{{6c+vxHJ_uf9B^E004i> B9zg&A diff --git a/normalized-cache-sqlite-incubating/sqldelight/blob/schema/2.db b/normalized-cache-sqlite-incubating/sqldelight/fields/schema/2.db similarity index 96% rename from normalized-cache-sqlite-incubating/sqldelight/blob/schema/2.db rename to normalized-cache-sqlite-incubating/sqldelight/fields/schema/2.db index 17bc7efe178cf33f29a58d1195823951afc38ba1..d7585ec68ff45e47e5bb2dfa19355fec16eff4c0 100644 GIT binary patch delta 245 zcmZp0XmFU2#UjGMFS4<48DD*4CL_DJxHx08VM$_APHI|aYEBB6U~&#}bqsM;2yt}s zaa8~dDQIvhC}gKrDulR3geds=197O2j}A}}B8?$jmY7qTs^H|~?*vkmn_7~Xl30=m z7br?iPDsrxOHGLf3a2V~`h~c9F!QW58+27C2$J04PA;>kr$I;o9OOtE!XMSD*0CyorTL1t6 delta 107 zcmZp0XmFU2#nQ{b-@92*K%QT_kda+nT%57du_Q4mCp9T2KdBf@FgXXgI)=C^gg83+ zxGI2!COhzpsVW2ndHOmAMJjl^M(S`WfMpb%eEgjh{QN@{{6c+vxHJ_uf9B^E004i> B9zg&A diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCache.kt 
b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCache.kt index 2420e2ef..ae0ca695 100644 --- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCache.kt +++ b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCache.kt @@ -2,9 +2,9 @@ package com.apollographql.cache.normalized.sql import com.apollographql.apollo.exception.apolloExceptionHandler import com.apollographql.cache.normalized.api.ApolloCacheHeaders -import com.apollographql.cache.normalized.api.ApolloCacheHeaders.EVICT_AFTER_READ import com.apollographql.cache.normalized.api.CacheHeaders import com.apollographql.cache.normalized.api.CacheKey +import com.apollographql.cache.normalized.api.DefaultRecordMerger import com.apollographql.cache.normalized.api.NormalizedCache import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.api.RecordMerger @@ -17,66 +17,29 @@ class SqlNormalizedCache internal constructor( private val recordDatabase: RecordDatabase, ) : NormalizedCache { - private fun maybeTransaction(condition: Boolean, block: () -> T): T { - return if (condition) { - recordDatabase.transaction { - block() - } - } else { - block() - } - } - override fun loadRecord(key: String, cacheHeaders: CacheHeaders): Record? { - if (cacheHeaders.hasHeader(ApolloCacheHeaders.MEMORY_CACHE_ONLY)) { - return null - } - val evictAfterRead = cacheHeaders.hasHeader(EVICT_AFTER_READ) - return maybeTransaction(evictAfterRead) { - try { - recordDatabase.select(key) - } catch (e: Exception) { - // Unable to read the record from the database, it is possibly corrupted - treat this as a cache miss - apolloExceptionHandler(Exception("Unable to read a record from the database", e)) - null - }?.also { - if (evictAfterRead) { - recordDatabase.delete(key) - } - } - } + return loadRecords(keys = listOf(key), cacheHeaders = cacheHeaders).firstOrNull() } override fun loadRecords(keys: Collection, cacheHeaders: CacheHeaders): Collection { if (cacheHeaders.hasHeader(ApolloCacheHeaders.MEMORY_CACHE_ONLY)) { return emptyList() } - val evictAfterRead = cacheHeaders.hasHeader(EVICT_AFTER_READ) - return maybeTransaction(evictAfterRead) { - try { - internalGetRecords(keys) - } catch (e: Exception) { - // Unable to read the records from the database, it is possibly corrupted - treat this as a cache miss - apolloExceptionHandler(Exception("Unable to read records from the database", e)) - emptyList() - }.also { - if (evictAfterRead) { - it.forEach { record -> - recordDatabase.delete(record.key) - } - } - } + return try { + selectRecords(keys) + } catch (e: Exception) { + // Unable to read the records from the database, it is possibly corrupted - treat this as a cache miss + apolloExceptionHandler(Exception("Unable to read records from the database", e)) + emptyList() } } override fun clearAll() { - recordDatabase.deleteAll() + recordDatabase.deleteAllRecords() } override fun remove(cacheKey: CacheKey, cascade: Boolean): Boolean { - return recordDatabase.transaction { - internalDeleteRecords(setOf(cacheKey.key), cascade) > 0 - } + return remove(cacheKeys = listOf(cacheKey), cascade = cascade) > 0 } override fun remove(cacheKeys: Collection, cascade: Boolean): Int { @@ -87,22 +50,13 @@ class SqlNormalizedCache internal constructor( override fun remove(pattern: String): Int { return recordDatabase.transaction { - recordDatabase.deleteMatching(pattern) + 
recordDatabase.deleteRecordsMatching(pattern)
       recordDatabase.changes().toInt()
     }
   }
 
   override fun merge(record: Record, cacheHeaders: CacheHeaders, recordMerger: RecordMerger): Set<String> {
-    if (cacheHeaders.hasHeader(ApolloCacheHeaders.DO_NOT_STORE) || cacheHeaders.hasHeader(ApolloCacheHeaders.MEMORY_CACHE_ONLY)) {
-      return emptySet()
-    }
-    return try {
-      internalUpdateRecord(record = record, cacheHeaders = cacheHeaders, recordMerger = recordMerger)
-    } catch (e: Exception) {
-      // Unable to merge the record in the database, it is possibly corrupted - treat this as a cache miss
-      apolloExceptionHandler(Exception("Unable to merge a record from the database", e))
-      emptySet()
-    }
+    return merge(records = listOf(record), cacheHeaders = cacheHeaders, recordMerger = recordMerger)
   }
 
   override fun merge(records: Collection<Record>, cacheHeaders: CacheHeaders, recordMerger: RecordMerger): Set<String> {
@@ -113,24 +67,24 @@ class SqlNormalizedCache internal constructor(
       internalUpdateRecords(records = records, cacheHeaders = cacheHeaders, recordMerger = recordMerger)
     } catch (e: Exception) {
       // Unable to merge the records in the database, it is possibly corrupted - treat this as a cache miss
-      apolloExceptionHandler(Exception("Unable to merge records from the database", e))
+      apolloExceptionHandler(Exception("Unable to merge records into the database", e))
       emptySet()
     }
   }
 
   override fun dump(): Map<KClass<*>, Map<String, Record>> {
-    return mapOf(this::class to recordDatabase.selectAll().associateBy { it.key })
+    return mapOf(this::class to recordDatabase.selectAllRecords().associateBy { it.key })
   }
 
   private fun getReferencedKeysRecursively(keys: Collection<String>, visited: MutableSet<String> = mutableSetOf()): Set<String> {
     if (keys.isEmpty()) return emptySet()
-    val referencedKeys = recordDatabase.select(keys - visited).flatMap { it.referencedFields() }.map { it.key }.toSet()
+    val referencedKeys = recordDatabase.selectRecords(keys - visited).flatMap { it.referencedFields() }.map { it.key }.toSet()
     visited += keys
     return referencedKeys + getReferencedKeysRecursively(referencedKeys, visited)
   }
 
   /**
-   * Assume an enclosing transaction
+   * Assumes an enclosing transaction
    */
   private fun internalDeleteRecords(keys: Collection<String>, cascade: Boolean): Int {
     val referencedKeys = if (cascade) {
@@ -139,59 +93,48 @@ class SqlNormalizedCache internal constructor(
       emptySet()
     }
     return (keys + referencedKeys).chunked(999).sumOf { chunkedKeys ->
-      recordDatabase.delete(chunkedKeys)
+      recordDatabase.deleteRecords(chunkedKeys)
       recordDatabase.changes().toInt()
     }
   }
 
   /**
-   * Update records, loading the previous ones
+   * Update records.
    *
-   * This is an optimization over [internalUpdateRecord]
+   * As an optimization, the [records] fields are directly upserted into the db when possible. This is possible when
+   * the [DefaultRecordMerger] is used and [ApolloCacheHeaders.ERRORS_REPLACE_CACHED_VALUES] is set to true.
+   * Otherwise, the [records] must be merged programmatically using the given [recordMerger], which requires loading the
+   * existing records from the db first.
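+   *
+   * For illustration only (hypothetical caller code, not part of this change), a merge that can take the fast path
+   * could look like:
+   *
+   * ```
+   * cache.merge(
+   *     records = records,
+   *     cacheHeaders = CacheHeaders.builder().addHeader(ApolloCacheHeaders.ERRORS_REPLACE_CACHED_VALUES, "true").build(),
+   *     recordMerger = DefaultRecordMerger,
+   * )
+   * ```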
*/ private fun internalUpdateRecords(records: Collection, cacheHeaders: CacheHeaders, recordMerger: RecordMerger): Set { - var updatedRecordKeys: Set = emptySet() val receivedDate = cacheHeaders.headerValue(ApolloCacheHeaders.RECEIVED_DATE) val expirationDate = cacheHeaders.headerValue(ApolloCacheHeaders.EXPIRATION_DATE) - recordDatabase.transaction { - val oldRecords = internalGetRecords( - keys = records.map { it.key }, - ).associateBy { it.key } - - updatedRecordKeys = records.flatMap { record -> - val oldRecord = oldRecords[record.key] - if (oldRecord == null) { - recordDatabase.insert(record.withDates(receivedDate = receivedDate, expirationDate = expirationDate)) - record.fieldKeys() - } else { - val (mergedRecord, changedKeys) = recordMerger.merge(RecordMergerContext(existing = oldRecord, incoming = record, cacheHeaders = cacheHeaders)) - if (mergedRecord.isNotEmpty()) { - recordDatabase.update(mergedRecord.withDates(receivedDate = receivedDate, expirationDate = expirationDate)) - } - changedKeys + val errorsReplaceCachedValues = cacheHeaders.headerValue(ApolloCacheHeaders.ERRORS_REPLACE_CACHED_VALUES) == "true" + return if (recordMerger is DefaultRecordMerger && errorsReplaceCachedValues) { + recordDatabase.transaction { + for (record in records) { + recordDatabase.insertOrUpdateRecord(record.withDates(receivedDate = receivedDate, expirationDate = expirationDate)) } - }.toSet() - } - return updatedRecordKeys - } - - /** - * Update a single [Record], loading the previous one - */ - private fun internalUpdateRecord(record: Record, cacheHeaders: CacheHeaders, recordMerger: RecordMerger): Set { - val receivedDate = cacheHeaders.headerValue(ApolloCacheHeaders.RECEIVED_DATE) - val expirationDate = cacheHeaders.headerValue(ApolloCacheHeaders.EXPIRATION_DATE) - return recordDatabase.transaction { - val oldRecord = recordDatabase.select(record.key) - if (oldRecord == null) { - recordDatabase.insert(record.withDates(receivedDate = receivedDate, expirationDate = expirationDate)) + } + records.flatMap { record -> record.fieldKeys() - } else { - val (mergedRecord, changedKeys) = recordMerger.merge(RecordMergerContext(existing = oldRecord, incoming = record, cacheHeaders = cacheHeaders)) - if (mergedRecord.isNotEmpty()) { - recordDatabase.update(mergedRecord.withDates(receivedDate = receivedDate, expirationDate = expirationDate)) - } - changedKeys + }.toSet() + } else { + recordDatabase.transaction { + val existingRecords = selectRecords(records.map { it.key }).associateBy { it.key } + records.flatMap { record -> + val existingRecord = existingRecords[record.key] + if (existingRecord == null) { + recordDatabase.insertOrUpdateRecord(record.withDates(receivedDate = receivedDate, expirationDate = expirationDate)) + record.fieldKeys() + } else { + val (mergedRecord, changedKeys) = recordMerger.merge(RecordMergerContext(existing = existingRecord, incoming = record.withDates(receivedDate = receivedDate, expirationDate = expirationDate), cacheHeaders = cacheHeaders)) + if (mergedRecord.isNotEmpty()) { + recordDatabase.insertOrUpdateRecord(mergedRecord) + } + changedKeys + } + }.toSet() } } } @@ -200,9 +143,9 @@ class SqlNormalizedCache internal constructor( * Loads a list of records, making sure to not query more than 999 at a time * to help with the SQLite limitations */ - private fun internalGetRecords(keys: Collection): List { + private fun selectRecords(keys: Collection): List { return keys.chunked(999).flatMap { chunkedKeys -> - recordDatabase.select(chunkedKeys) + 
recordDatabase.selectRecords(chunkedKeys) } } } diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/BlobRecordSerializer.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/ApolloJsonElementSerializer.kt similarity index 68% rename from normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/BlobRecordSerializer.kt rename to normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/ApolloJsonElementSerializer.kt index 682e0dea..7f576480 100644 --- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/BlobRecordSerializer.kt +++ b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/ApolloJsonElementSerializer.kt @@ -1,74 +1,32 @@ package com.apollographql.cache.normalized.sql.internal +import com.apollographql.apollo.api.json.ApolloJsonElement import com.apollographql.apollo.api.Error import com.apollographql.apollo.api.Error.Builder import com.apollographql.apollo.api.json.JsonNumber import com.apollographql.cache.normalized.api.CacheKey -import com.apollographql.cache.normalized.api.Record import okio.Buffer import okio.utf8Size /** - * A serializer that serializes/deserializes a [Record] to a [ByteArray] - * - * It's a very basic implementation that encodes a record like below - * - * number of entries - Int - * ------ - * name of the entry0 - String - * timestamp of entry0 - Long? - * value of entry0 - Any? - * ------ - * name of the entry1 - String - * timestamp of entry1 - Long? - * value of entry1 - Any? - * ------ - * etc... - * - * For each value, the type of the value is encoded using a single identifier byte so that deserialization can deserialize - * to the expected type - * - * This should be revisited/optimized + * A serializer that serializes/deserializes [ApolloJsonElement]s to/from [ByteArray]s. */ -internal object BlobRecordSerializer { - fun serialize(record: Record): ByteArray { +internal object ApolloJsonElementSerializer { + fun serialize(jsonElement: ApolloJsonElement): ByteArray { val buffer = Buffer() - - buffer.writeAny(record.metadata) - val keys = record.fields.keys - buffer.writeInt(keys.size) - for (key in keys) { - buffer.writeString(key) - buffer.writeAny(record.fields[key]) - } - + buffer.writeAny(jsonElement) return buffer.readByteArray() } - /** - * returns the [Record] for the given Json - * - * @throws Exception if the [Record] cannot be deserialized - */ - @Suppress("UNCHECKED_CAST") - fun deserialize(key: String, bytes: ByteArray): Record { + fun deserialize(bytes: ByteArray?): ApolloJsonElement { + if (bytes == null) return null val buffer = Buffer().write(bytes) - - val metadata = buffer.readAny() as Map> - - val fields = mutableMapOf() - val size = buffer.readInt() - - for (i in 0.until(size)) { - val name = buffer.readString() - fields[name] = buffer.readAny() - } - - return Record(key, fields, null, metadata) + return buffer.readAny() } private fun Buffer.writeString(value: String) { - writeInt(value.utf8Size().toInt()) + // TODO: special case for empty string, saves 4 bytes + writeInt(value.utf8Size().toInt()) // TODO: sizes should be unsigned writeUtf8(value) } @@ -76,7 +34,7 @@ internal object BlobRecordSerializer { return readUtf8(readInt().toLong()) } - private fun Buffer.writeAny(value: Any?) 
{ + private fun Buffer.writeAny(value: ApolloJsonElement) { when (value) { is String -> { buffer.writeByte(STRING) @@ -104,7 +62,7 @@ internal object BlobRecordSerializer { } is Boolean -> { - buffer.writeByte(BOOLEAN) + buffer.writeByte(BOOLEAN) // TODO: 1 byte for BOOLEAN_TRUE, 1 byte for BOOLEAN_FALSE buffer.writeByte(if (value) 1 else 0) } @@ -114,16 +72,16 @@ internal object BlobRecordSerializer { } is List<*> -> { - buffer.writeByte(LIST) - buffer.writeInt(value.size) + buffer.writeByte(LIST) // TODO: special case for empty list, saves 4 bytes + buffer.writeInt(value.size) // TODO: sizes should be unsigned value.forEach { buffer.writeAny(it) } } is Map<*, *> -> { - buffer.writeByte(MAP) - buffer.writeInt(value.size) + buffer.writeByte(MAP) // TODO: special case for empty map, saves 4 bytes + buffer.writeInt(value.size) // TODO: sizes should be unsigned @Suppress("UNCHECKED_CAST") value as Map value.forEach { @@ -155,7 +113,7 @@ internal object BlobRecordSerializer { } } - private fun Buffer.readAny(): Any? { + private fun Buffer.readAny(): ApolloJsonElement { return when (val what = readByte().toInt()) { STRING -> readString() INT -> readInt() @@ -213,7 +171,7 @@ internal object BlobRecordSerializer { private const val STRING = 0 private const val INT = 1 - private const val LONG = 2 + private const val LONG = 2 // TODO replace INT and LONG by BYTE, UBYTE, SHORT, USHORT, UINT for smaller values private const val BOOLEAN = 3 private const val DOUBLE = 4 private const val JSON_NUMBER = 5 diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/Blob2RecordDatabase.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/Blob2RecordDatabase.kt deleted file mode 100644 index 97b2bfad..00000000 --- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/Blob2RecordDatabase.kt +++ /dev/null @@ -1,67 +0,0 @@ -package com.apollographql.cache.normalized.sql.internal - -import com.apollographql.cache.normalized.api.ApolloCacheHeaders -import com.apollographql.cache.normalized.api.Record -import com.apollographql.cache.normalized.sql.internal.blob2.Blob2Queries - -internal class Blob2RecordDatabase(private val blobQueries: Blob2Queries) : RecordDatabase { - override fun select(key: String): Record? 
{ - return blobQueries.recordForKey(key).executeAsList() - .map { - BlobRecordSerializer.deserialize(it.key, it.blob) - } - .singleOrNull() - } - - override fun select(keys: Collection): List { - return blobQueries.recordsForKeys(keys).executeAsList() - .map { - BlobRecordSerializer.deserialize(it.key, it.blob) - } - } - - override fun transaction(noEnclosing: Boolean, body: () -> T): T { - return blobQueries.transactionWithResult { - body() - } - } - - override fun delete(key: String) { - blobQueries.delete(key) - } - - override fun delete(keys: Collection) { - blobQueries.deleteRecords(keys) - } - - override fun deleteMatching(pattern: String) { - blobQueries.deleteRecordsWithKeyMatching(pattern, "\\") - } - - override fun deleteAll() { - blobQueries.deleteAll() - } - - override fun changes(): Long { - return blobQueries.changes().executeAsOne() - } - - override fun insert(record: Record) { - blobQueries.insert(record.key, BlobRecordSerializer.serialize(record), record.receivedDate()) - } - - override fun update(record: Record) { - blobQueries.update(BlobRecordSerializer.serialize(record), record.receivedDate(), record.key) - } - - override fun selectAll(): List { - TODO("Not yet implemented") - } - - /** - * The most recent of the fields' received dates. - */ - private fun Record.receivedDate(): Long? { - return metadata.values.mapNotNull { it[ApolloCacheHeaders.RECEIVED_DATE] as? Long }.maxOrNull() - } -} diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/BlobRecordDatabase.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/BlobRecordDatabase.kt deleted file mode 100644 index 99c2b28e..00000000 --- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/BlobRecordDatabase.kt +++ /dev/null @@ -1,61 +0,0 @@ -package com.apollographql.cache.normalized.sql.internal - -import com.apollographql.cache.normalized.api.Record -import com.apollographql.cache.normalized.sql.internal.blob.BlobQueries - -internal class BlobRecordDatabase(private val blobQueries: BlobQueries) : RecordDatabase { - override fun select(key: String): Record? 
{ - return blobQueries.recordForKey(key).executeAsList() - .map { - BlobRecordSerializer.deserialize(it.key, it.blob) - } - .singleOrNull() - } - - override fun select(keys: Collection): List { - return blobQueries.recordsForKeys(keys).executeAsList() - .map { - BlobRecordSerializer.deserialize(it.key, it.blob) - } - } - - override fun transaction(noEnclosing: Boolean, body: () -> T): T { - return blobQueries.transactionWithResult { - body() - } - } - - override fun delete(key: String) { - blobQueries.delete(key) - } - - override fun delete(keys: Collection) { - blobQueries.deleteRecords(keys) - } - - override fun deleteMatching(pattern: String) { - blobQueries.deleteRecordsWithKeyMatching(pattern, "\\") - } - - override fun deleteAll() { - blobQueries.deleteAll() - } - - override fun changes(): Long { - return blobQueries.changes().executeAsOne() - } - - override fun insert(record: Record) { - blobQueries.insert(record.key, BlobRecordSerializer.serialize(record)) - } - - override fun update(record: Record) { - blobQueries.update(BlobRecordSerializer.serialize(record), record.key) - } - - override fun selectAll(): List { - return blobQueries.selectRecords().executeAsList().map { - BlobRecordSerializer.deserialize(it.key, it.blob) - } - } -} diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordDatabase.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordDatabase.kt index f1f93a37..467a4121 100644 --- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordDatabase.kt +++ b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordDatabase.kt @@ -1,52 +1,117 @@ package com.apollographql.cache.normalized.sql.internal +import com.apollographql.apollo.api.json.ApolloJsonElement +import com.apollographql.cache.normalized.api.ApolloCacheHeaders import com.apollographql.cache.normalized.api.Record +import com.apollographql.cache.normalized.api.expirationDate +import com.apollographql.cache.normalized.api.receivedDate +import com.apollographql.cache.normalized.sql.internal.fields.Field_ +import com.apollographql.cache.normalized.sql.internal.fields.FieldsQueries -/** - * A database that can store [Record] - * - * All calls are synchronous, the calling code is expected to handle threading. - * - */ -internal interface RecordDatabase { - /** - * @return the [Record] of null if there is no record for the given id - */ - fun select(key: String): Record? +internal class RecordDatabase(private val fieldsQueries: FieldsQueries) { + fun transaction(body: () -> T): T { + return fieldsQueries.transactionWithResult { + body() + } + } /** - * @return the list of records for the given ids - * This is an optimization to avoid doing too many queries. - * - * @param ids the ids to get the record for. [ids.size] must be less than 999 - * @return the [Record] for the ids. 
If some [Record]s are missing, the returned list size might be - * less that [ids] + * @param keys the keys of the records to select, size must be <= 999 */ - fun select(keys: Collection): List + fun selectRecords(keys: Collection): List { + val fieldsByRecordKey: Map> = fieldsQueries.selectRecords(keys).executeAsList().groupBy { it.key } + return fieldsByRecordKey.toRecords() + } - fun selectAll(): List - - /** - * executes code in a transaction - */ - fun transaction( - noEnclosing: Boolean = false, - body: () -> T, - ): T + fun selectAllRecords(): List { + val fieldsByRecordKey: Map> = fieldsQueries.selectAllRecords().executeAsList().groupBy { it.key } + return fieldsByRecordKey.toRecords() + } - fun delete(key: String) + private fun Map>.toRecords(): List = + mapValues { (key, fieldList) -> + val fields: Map = + fieldList.associate { field -> field.field_ to ApolloJsonElementSerializer.deserialize(field.value_) } - fun delete(keys: Collection) + @Suppress("UNCHECKED_CAST") + val metadata: Map> = + fieldList.associate { field -> + field.field_ to (ApolloJsonElementSerializer.deserialize(field.metadata) as Map?).orEmpty() + + buildMap { + // Dates are stored separately in their own columns + if (field.received_date != null) { + put(ApolloCacheHeaders.RECEIVED_DATE, field.received_date) + } + if (field.expiration_date != null) { + put(ApolloCacheHeaders.EXPIRATION_DATE, field.expiration_date) + } + } + }.filterValues { it.isNotEmpty() } + Record( + key = key, + fields = fields, + metadata = metadata, + ) + }.values.toList() - fun deleteMatching(pattern: String) + fun insertOrUpdateRecord(record: Record) { + for ((field, value) in record.fields) { + insertOrUpdateField( + key = record.key, + field = field, + value = value, + metadata = record.metadata[field], + receivedDate = record.receivedDate(field), + expirationDate = record.expirationDate(field), + ) + } + } - fun deleteAll() + private fun insertOrUpdateField( + key: String, + field: String, + value: ApolloJsonElement, + metadata: Map?, + receivedDate: Long?, + expirationDate: Long?, + ) { + fieldsQueries.insertOrUpdateField( + key = key, + field_ = field, + value_ = ApolloJsonElementSerializer.serialize(value), + metadata = metadata + ?.takeIf { it.isNotEmpty() } + ?.let { + ApolloJsonElementSerializer.serialize( + // Don't store the dates in the metadata as they are stored separately in their own columns + it - ApolloCacheHeaders.RECEIVED_DATE - ApolloCacheHeaders.EXPIRATION_DATE + ) + }, + received_date = receivedDate, + expiration_date = expirationDate, + ) + } /** - * Returns the number of rows affected by the last query + * @param keys the keys of the records to delete, size must be <= 999 */ - fun changes(): Long + fun deleteRecords(keys: Collection) { + fieldsQueries.deleteRecords(keys) + } + + fun deleteRecordsMatching(pattern: String) { + fieldsQueries.deleteRecordsMatching(pattern) + } + + fun deleteAllRecords() { + fieldsQueries.deleteAllRecords() + } + + fun trimByReceivedDate(limit: Int) { + fieldsQueries.trimByReceivedDate(limit.toLong()) + } - fun insert(record: Record) - fun update(record: Record) + fun changes(): Long { + return fieldsQueries.changes().executeAsOne() + } } diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/factoryHelpers.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/factoryHelpers.kt index 0fbdfe92..5e279da7 100644 --- 
a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/factoryHelpers.kt +++ b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/factoryHelpers.kt @@ -4,7 +4,7 @@ import app.cash.sqldelight.db.QueryResult import app.cash.sqldelight.db.SqlDriver import app.cash.sqldelight.db.SqlSchema import com.apollographql.apollo.exception.apolloExceptionHandler -import com.apollographql.cache.normalized.sql.internal.blob.BlobDatabase +import com.apollographql.cache.normalized.sql.internal.fields.FieldsDatabase internal fun createRecordDatabase(driver: SqlDriver): RecordDatabase { maybeCreateOrMigrateSchema(driver, getSchema()) @@ -30,12 +30,12 @@ internal fun createRecordDatabase(driver: SqlDriver): RecordDatabase { */ } - val expectedTableName = "blobs" + val expectedTableName = "field" check(tableNames.isEmpty() || tableNames.contains(expectedTableName)) { "Apollo: Cannot find the '$expectedTableName' table (found '$tableNames' instead)" } - return BlobRecordDatabase(BlobDatabase(driver).blobQueries) + return RecordDatabase(FieldsDatabase(driver).fieldsQueries) } -internal fun getSchema(): SqlSchema> = BlobDatabase.Schema +internal fun getSchema(): SqlSchema> = FieldsDatabase.Schema diff --git a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/blob/com/apollographql/cache/normalized/sql/internal/blob/blob.sq b/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/blob/com/apollographql/cache/normalized/sql/internal/blob/blob.sq deleted file mode 100644 index e82a4938..00000000 --- a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/blob/com/apollographql/cache/normalized/sql/internal/blob/blob.sq +++ /dev/null @@ -1,35 +0,0 @@ -CREATE TABLE blobs ( - key TEXT NOT NULL PRIMARY KEY, - blob BLOB NOT NULL -) WITHOUT ROWID; - -recordForKey: -SELECT key, blob FROM blobs WHERE key=?; - -recordsForKeys: -SELECT key, blob FROM blobs WHERE key IN ?; - -insert: -INSERT INTO blobs (key, blob) VALUES (?,?); - -update: -UPDATE blobs SET blob=:blob WHERE key=:key; - -delete: -DELETE FROM blobs WHERE key=?; - -deleteRecords: -DELETE FROM blobs WHERE key IN ?; - -deleteRecordsWithKeyMatching: -DELETE FROM blobs WHERE key LIKE ? 
ESCAPE ?; - --- use only for debug -selectRecords: -SELECT * FROM blobs; - -changes: -SELECT changes(); - -deleteAll: -DELETE FROM blobs; diff --git a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/blob/migrations/1.sqm b/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/blob/migrations/1.sqm deleted file mode 100644 index f57acba3..00000000 --- a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/blob/migrations/1.sqm +++ /dev/null @@ -1,7 +0,0 @@ --- Version 1 is either the blob schema (do nothing) or the legacy json schema (drop and create) -DROP TABLE IF EXISTS records; - -CREATE TABLE IF NOT EXISTS blobs ( - key TEXT NOT NULL PRIMARY KEY, - blob BLOB NOT NULL -) WITHOUT ROWID; diff --git a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/blob2/com/apollographql/cache/normalized/sql/internal/blob2/blob2.sq b/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/blob2/com/apollographql/cache/normalized/sql/internal/blob2/blob2.sq deleted file mode 100644 index 80f8da29..00000000 --- a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/blob2/com/apollographql/cache/normalized/sql/internal/blob2/blob2.sq +++ /dev/null @@ -1,45 +0,0 @@ -CREATE TABLE blobs ( - key TEXT NOT NULL PRIMARY KEY, - blob BLOB NOT NULL, - date INTEGER -) WITHOUT ROWID; - -CREATE INDEX date_idx -ON blobs(date); - -recordForKey: -SELECT key, blob FROM blobs WHERE key=?; - -recordsForKeys: -SELECT key, blob FROM blobs WHERE key IN ?; - -insert: -INSERT INTO blobs (key, blob, date) VALUES (?,?, ?); - -update: -UPDATE blobs SET blob=:blob, date=:date WHERE key=:key; - -delete: -DELETE FROM blobs WHERE key=?; - -deleteRecords: -DELETE FROM blobs WHERE key IN ?; - -deleteRecordsWithKeyMatching: -DELETE FROM blobs WHERE key LIKE ? ESCAPE ?; - --- use only for debug -selectRecords: -SELECT * FROM blobs; - -changes: -SELECT changes(); - -deleteAll: -DELETE FROM blobs; - -count: -SELECT COUNT(*) FROM blobs; - -trim: -DELETE FROM blobs WHERE key IN (SELECT key FROM blobs ORDER BY date LIMIT ?); diff --git a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/apollographql/cache/normalized/sql/internal/fields/fields.sq b/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/apollographql/cache/normalized/sql/internal/fields/fields.sq new file mode 100644 index 00000000..dbb55b69 --- /dev/null +++ b/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/apollographql/cache/normalized/sql/internal/fields/fields.sq @@ -0,0 +1,41 @@ +CREATE TABLE field ( + key TEXT NOT NULL, + field TEXT NOT NULL, + value BLOB, + metadata BLOB, + received_date INTEGER, + expiration_date INTEGER, + PRIMARY KEY (key, field) ON CONFLICT REPLACE +) +WITHOUT ROWID; + +selectRecords: +SELECT key, field, value, metadata, received_date, expiration_date FROM field WHERE key IN ?; + +selectAllRecords: +SELECT key, field, value, metadata, received_date, expiration_date FROM field; + +insertOrUpdateField: +INSERT INTO field (key, field, value, metadata, received_date, expiration_date) VALUES (?, ?, ?, ?, ?, ?); + +deleteRecords: +DELETE FROM field WHERE key IN ?; + +deleteRecordsMatching: +DELETE FROM field WHERE key LIKE ? ESCAPE '\'; + +deleteAllRecords: +DELETE FROM field; + +trimByReceivedDate: +DELETE FROM field +WHERE (key, field) IN ( + SELECT key, field + FROM field + WHERE received_date IS NOT NULL + ORDER BY received_date ASC + LIMIT ? 
+); + +changes: +SELECT changes(); diff --git a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/migrations/1.sqm b/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/migrations/1.sqm new file mode 100644 index 00000000..00f9cafe --- /dev/null +++ b/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/migrations/1.sqm @@ -0,0 +1,13 @@ +-- Version 1 is either the fields schema (do nothing) or the legacy json schema (drop and create) +DROP TABLE IF EXISTS records; + +CREATE TABLE IF NOT EXISTS field ( + key TEXT NOT NULL, + field TEXT NOT NULL, + value BLOB, + metadata BLOB, + received_date INTEGER, + expiration_date INTEGER, + PRIMARY KEY (key, field) ON CONFLICT REPLACE +) +WITHOUT ROWID; diff --git a/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCacheTest.kt b/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCacheTest.kt index 0ca8f53e..56f42034 100644 --- a/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCacheTest.kt +++ b/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCacheTest.kt @@ -13,8 +13,8 @@ import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.DefaultRecordMerger import com.apollographql.cache.normalized.api.NormalizedCache import com.apollographql.cache.normalized.api.Record -import com.apollographql.cache.normalized.sql.internal.BlobRecordDatabase -import com.apollographql.cache.normalized.sql.internal.blob.BlobQueries +import com.apollographql.cache.normalized.sql.internal.RecordDatabase +import com.apollographql.cache.normalized.sql.internal.fields.FieldsQueries import kotlin.test.BeforeTest import kotlin.test.Test import kotlin.test.assertEquals @@ -122,35 +122,6 @@ class SqlNormalizedCacheTest { assertNull(cache.loadRecord(STANDARD_KEY, CacheHeaders.NONE)) } - // Tests for StandardCacheHeader compliance - @Test - fun testHeader_evictAfterRead() { - createRecord(STANDARD_KEY) - val record = cache.loadRecord(STANDARD_KEY, CacheHeaders.builder() - .addHeader(ApolloCacheHeaders.EVICT_AFTER_READ, "true").build() - ) - assertNotNull(record) - val nullRecord = cache.loadRecord(STANDARD_KEY, CacheHeaders.builder() - .addHeader(ApolloCacheHeaders.EVICT_AFTER_READ, "true").build() - ) - assertNull(nullRecord) - } - - @Test - fun testHeader_evictAfterBatchRead() { - createRecord(STANDARD_KEY) - createRecord(QUERY_ROOT_KEY) - val selectionSet = setOf(STANDARD_KEY, QUERY_ROOT_KEY) - val records = cache.loadRecords(selectionSet, CacheHeaders.builder() - .addHeader(ApolloCacheHeaders.EVICT_AFTER_READ, "true").build() - ) - assertEquals(records.size, 2) - val emptyRecords = cache.loadRecords(selectionSet, CacheHeaders.builder() - .addHeader(ApolloCacheHeaders.EVICT_AFTER_READ, "true").build() - ) - assertTrue(emptyRecords.isEmpty()) - } - @Test fun testHeader_noCache() { cache.merge( @@ -227,14 +198,14 @@ class SqlNormalizedCacheTest { @Test fun exceptionCallsExceptionHandler() { - val badCache = SqlNormalizedCache(BlobRecordDatabase(BlobQueries(BadDriver))) + val badCache = SqlNormalizedCache(RecordDatabase(FieldsQueries(BadDriver))) var throwable: Throwable? 
= null apolloExceptionHandler = { throwable = it } badCache.loadRecord(STANDARD_KEY, CacheHeaders.NONE) - assertEquals("Unable to read a record from the database", throwable!!.message) + assertEquals("Unable to read records from the database", throwable!!.message) assertEquals("bad cache", throwable!!.cause!!.message) throwable = null @@ -249,7 +220,7 @@ class SqlNormalizedCacheTest { cacheHeaders = CacheHeaders.NONE, recordMerger = DefaultRecordMerger, ) - assertEquals("Unable to merge a record from the database", throwable!!.message) + assertEquals("Unable to merge records into the database", throwable!!.message) assertEquals("bad cache", throwable!!.cause!!.message) } diff --git a/tests/garbage-collection/src/commonTest/kotlin/DanglingReferencesTest.kt b/tests/garbage-collection/src/commonTest/kotlin/DanglingReferencesTest.kt index 569d3bfe..2411556d 100644 --- a/tests/garbage-collection/src/commonTest/kotlin/DanglingReferencesTest.kt +++ b/tests/garbage-collection/src/commonTest/kotlin/DanglingReferencesTest.kt @@ -13,6 +13,7 @@ import com.apollographql.cache.normalized.sql.SqlNormalizedCacheFactory import com.apollographql.cache.normalized.store import com.apollographql.mockserver.MockServer import com.apollographql.mockserver.enqueueString +import kotlinx.coroutines.test.TestResult import okio.use import kotlin.test.Test import kotlin.test.assertEquals @@ -21,9 +22,19 @@ import kotlin.test.assertTrue class DanglingReferencesTest { @Test - fun simple() = runTest { + fun simpleMemory() = simple(ApolloStore(MemoryCacheFactory())) + + @Test + fun simpleSql() = simple(ApolloStore(SqlNormalizedCacheFactory())) + + @Test + fun simpleChained(): TestResult { + return simple(ApolloStore(MemoryCacheFactory().chain(SqlNormalizedCacheFactory()))) + } + + private fun simple(apolloStore: ApolloStore) = runTest { val mockServer = MockServer() - val store = ApolloStore(MemoryCacheFactory().chain(SqlNormalizedCacheFactory())).also { it.clearAll() } + val store = apolloStore.also { it.clearAll() } ApolloClient.Builder() .serverUrl(mockServer.url()) .store(store) @@ -54,9 +65,17 @@ class DanglingReferencesTest { } @Test - fun multiple() = runTest { + fun multipleMemory() = multiple(ApolloStore(MemoryCacheFactory())) + + @Test + fun multipleSql() = multiple(ApolloStore(SqlNormalizedCacheFactory())) + + @Test + fun multipleChained() = multiple(ApolloStore(MemoryCacheFactory().chain(SqlNormalizedCacheFactory()))) + + private fun multiple(apolloStore: ApolloStore) = runTest { val mockServer = MockServer() - val store = ApolloStore(MemoryCacheFactory().chain(SqlNormalizedCacheFactory())).also { it.clearAll() } + val store = apolloStore.also { it.clearAll() } ApolloClient.Builder() .serverUrl(mockServer.url()) .store(store) diff --git a/tests/garbage-collection/src/commonTest/kotlin/GarbageCollectTest.kt b/tests/garbage-collection/src/commonTest/kotlin/GarbageCollectTest.kt index 49ed6e34..a8e2a035 100644 --- a/tests/garbage-collection/src/commonTest/kotlin/GarbageCollectTest.kt +++ b/tests/garbage-collection/src/commonTest/kotlin/GarbageCollectTest.kt @@ -23,9 +23,17 @@ import kotlin.time.Duration.Companion.seconds class GarbageCollectTest { @Test - fun garbageCollect() = runTest { + fun garbageCollectMemory() = garbageCollect(ApolloStore(MemoryCacheFactory())) + + @Test + fun garbageCollectSql() = garbageCollect(ApolloStore(SqlNormalizedCacheFactory())) + + @Test + fun garbageCollectChained() = garbageCollect(ApolloStore(MemoryCacheFactory().chain(SqlNormalizedCacheFactory()))) + + private fun 
garbageCollect(apolloStore: ApolloStore) = runTest { val mockServer = MockServer() - val store = ApolloStore(MemoryCacheFactory().chain(SqlNormalizedCacheFactory())).also { it.clearAll() } + val store = apolloStore.also { it.clearAll() } ApolloClient.Builder() .serverUrl(mockServer.url()) .store(store) diff --git a/tests/garbage-collection/src/commonTest/kotlin/ReachableCacheKeysTest.kt b/tests/garbage-collection/src/commonTest/kotlin/ReachableCacheKeysTest.kt index 6e1945f1..625f6bfa 100644 --- a/tests/garbage-collection/src/commonTest/kotlin/ReachableCacheKeysTest.kt +++ b/tests/garbage-collection/src/commonTest/kotlin/ReachableCacheKeysTest.kt @@ -19,14 +19,21 @@ import okio.use import test.fragment.RepositoryFragment import test.fragment.RepositoryFragmentImpl import kotlin.test.Test -import kotlin.test.assertContentEquals import kotlin.test.assertEquals class ReachableCacheKeysTest { @Test - fun getReachableCacheKeys() = runTest { + fun getReachableCacheKeysMemory() = getReachableCacheKeys(ApolloStore(MemoryCacheFactory())) + + @Test + fun getReachableCacheKeysSql() = getReachableCacheKeys(ApolloStore(SqlNormalizedCacheFactory())) + + @Test + fun getReachableCacheKeysChained() = getReachableCacheKeys(ApolloStore(MemoryCacheFactory().chain(SqlNormalizedCacheFactory()))) + + private fun getReachableCacheKeys(apolloStore: ApolloStore) = runTest { val mockServer = MockServer() - val store = ApolloStore(MemoryCacheFactory().chain(SqlNormalizedCacheFactory())).also { it.clearAll() } + val store = apolloStore.also { it.clearAll() } ApolloClient.Builder() .serverUrl(mockServer.url()) .store(store) @@ -127,8 +134,8 @@ class ReachableCacheKeysTest { apolloClient.query(query).fetchPolicy(FetchPolicy.NetworkOnly).execute() var reachableCacheKeys = store.accessCache { it.allRecords().getReachableCacheKeys() } - assertContentEquals( - listOf( + assertEquals( + setOf( CacheKey("QUERY_ROOT"), CacheKey("Repository:8"), CacheKey("Repository:7"), @@ -148,8 +155,8 @@ class ReachableCacheKeysTest { // Remove User 43, now Repositories 5 and 6 should not be reachable / 7 should still be reachable store.remove(CacheKey("User:43"), cascade = false) reachableCacheKeys = store.accessCache { it.allRecords().getReachableCacheKeys() } - assertContentEquals( - listOf( + assertEquals( + setOf( CacheKey("QUERY_ROOT"), CacheKey("Repository:8"), CacheKey("Repository:7"), @@ -170,8 +177,8 @@ class ReachableCacheKeysTest { RepositoryFragment(id = "500", __typename = "Repository", starGazers = emptyList()), ) reachableCacheKeys = store.accessCache { it.allRecords().getReachableCacheKeys() } - assertContentEquals( - listOf( + assertEquals( + setOf( CacheKey("QUERY_ROOT"), CacheKey("Repository:8"), CacheKey("Repository:7"), diff --git a/tests/garbage-collection/src/commonTest/kotlin/StaleFieldsTest.kt b/tests/garbage-collection/src/commonTest/kotlin/StaleFieldsTest.kt index a74e8054..14cb0676 100644 --- a/tests/garbage-collection/src/commonTest/kotlin/StaleFieldsTest.kt +++ b/tests/garbage-collection/src/commonTest/kotlin/StaleFieldsTest.kt @@ -30,9 +30,18 @@ import kotlin.time.Duration.Companion.seconds class StaleFieldsTest { @Test - fun clientControlledRemoveFields() = runTest { + fun clientControlledRemoveFieldsMemory() = clientControlledRemoveFields(ApolloStore(MemoryCacheFactory())) + + @Test + fun clientControlledRemoveFieldsSql() = clientControlledRemoveFields(ApolloStore(SqlNormalizedCacheFactory())) + + @Test + fun clientControlledRemoveFieldsChained() = + 
clientControlledRemoveFields(ApolloStore(MemoryCacheFactory().chain(SqlNormalizedCacheFactory()))) + + private fun clientControlledRemoveFields(apolloStore: ApolloStore) = runTest { val mockServer = MockServer() - val store = ApolloStore(MemoryCacheFactory().chain(SqlNormalizedCacheFactory())).also { it.clearAll() } + val store = apolloStore.also { it.clearAll() } ApolloClient.Builder() .serverUrl(mockServer.url()) .store(store) @@ -98,9 +107,18 @@ class StaleFieldsTest { } @Test - fun clientControlledRemoveRecords() = runTest { + fun clientControlledRemoveRecordsMemory() = clientControlledRemoveRecords(ApolloStore(MemoryCacheFactory())) + + @Test + fun clientControlledRemoveRecordsSql() = clientControlledRemoveRecords(ApolloStore(SqlNormalizedCacheFactory())) + + @Test + fun clientControlledRemoveRecordsChained() = + clientControlledRemoveRecords(ApolloStore(MemoryCacheFactory().chain(SqlNormalizedCacheFactory()))) + + private fun clientControlledRemoveRecords(apolloStore: ApolloStore) = runTest { val mockServer = MockServer() - val store = ApolloStore(MemoryCacheFactory().chain(SqlNormalizedCacheFactory())).also { it.clearAll() } + val store = apolloStore.also { it.clearAll() } ApolloClient.Builder() .serverUrl(mockServer.url()) .store(store) @@ -167,9 +185,18 @@ class StaleFieldsTest { } @Test - fun serverControlledRemoveFields() = runTest { + fun serverControlledRemoveFieldsMemory() = serverControlledRemoveFields(ApolloStore(MemoryCacheFactory())) + + @Test + fun serverControlledRemoveFieldsSql() = serverControlledRemoveFields(ApolloStore(SqlNormalizedCacheFactory())) + + @Test + fun serverControlledRemoveFieldsChained() = + serverControlledRemoveFields(ApolloStore(MemoryCacheFactory().chain(SqlNormalizedCacheFactory()))) + + private fun serverControlledRemoveFields(apolloStore: ApolloStore) = runTest { val mockServer = MockServer() - val store = ApolloStore(MemoryCacheFactory().chain(SqlNormalizedCacheFactory())).also { it.clearAll() } + val store = apolloStore.also { it.clearAll() } ApolloClient.Builder() .serverUrl(mockServer.url()) .store(store) diff --git a/tests/normalized-cache/src/commonTest/kotlin/CacheFlagsTest.kt b/tests/normalized-cache/src/commonTest/kotlin/CacheFlagsTest.kt index 723d4445..43c6f44d 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/CacheFlagsTest.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/CacheFlagsTest.kt @@ -24,7 +24,6 @@ import kotlinx.coroutines.flow.Flow import kotlinx.coroutines.flow.map import normalizer.HeroNameQuery import kotlin.test.Test -import kotlin.test.assertEquals import kotlin.test.assertIs import kotlin.test.assertNotNull @@ -51,29 +50,6 @@ class CacheFlagsTest { ) } - @Test - fun testEvictAfterRead() = runTest(before = { setUp() }) { - val query = HeroNameQuery() - val data = HeroNameQuery.Data(HeroNameQuery.Hero("R2-D2")) - apolloClient.enqueueTestResponse(query, data) - - // Store the data - apolloClient.query(query).fetchPolicy(FetchPolicy.NetworkOnly).execute() - - // This should work and evict the entries - val response = apolloClient.query(query) - .fetchPolicy(FetchPolicy.CacheOnly) - .cacheHeaders(CacheHeaders.builder().addHeader(ApolloCacheHeaders.EVICT_AFTER_READ, "true").build()) - .execute() - - assertEquals("R2-D2", response.data?.hero?.name) - - // Second time should fail - assertIs( - apolloClient.query(query).fetchPolicy(FetchPolicy.CacheOnly).execute().exception - ) - } - private val partialResponseData = HeroNameQuery.Data(null) private val partialResponseErrors = listOf( Error.Builder(message = "An 
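// Illustration (not part of the patch): the refactoring pattern applied throughout the
// test changes above. Each former single test that built a memory-chained-to-SQL store is
// split into Memory, Sql and Chained variants that delegate to one private helper, so the
// same assertions run against every cache backend. Class and helper names below are
// hypothetical; imports are the same ones used by the tests in this patch.
class ExampleBackendTest {
  @Test
  fun exampleMemory() = example(ApolloStore(MemoryCacheFactory()))

  @Test
  fun exampleSql() = example(ApolloStore(SqlNormalizedCacheFactory()))

  @Test
  fun exampleChained() = example(ApolloStore(MemoryCacheFactory().chain(SqlNormalizedCacheFactory())))

  private fun example(apolloStore: ApolloStore) = runTest {
    val store = apolloStore.also { it.clearAll() }
    // ...exercise `store` exactly as the original test body did...
  }
}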
error Happened") From 0c172463abac14c824325701fd851d2d1bb64a84 Mon Sep 17 00:00:00 2001 From: BoD Date: Fri, 10 Jan 2025 12:22:06 +0100 Subject: [PATCH 05/29] Add support for trim --- .../api/normalized-cache-incubating.api | 7 + .../api/normalized-cache-incubating.klib.api | 2 + .../cache/normalized/api/NormalizedCache.kt | 15 ++ .../cache/normalized/memory/MemoryCache.kt | 8 + .../normalized-cache-sqlite-incubating.api | 2 + .../normalized-cache-sqlite-incubating.api | 6 +- ...ormalized-cache-sqlite-incubating.klib.api | 168 +++++------------- .../normalized/sql/SqlNormalizedCache.kt | 12 ++ .../normalized/sql/internal/RecordDatabase.kt | 27 ++- .../normalized/sql/internal/factoryHelpers.kt | 2 +- .../normalized/sql/internal/fields/fields.sq | 3 + .../normalized/sql/SqlNormalizedCacheTest.kt | 3 +- .../cache/normalized/sql/TrimTest.kt | 20 +-- .../sql/TrimmableNormalizedCacheFactory.kt | 48 ----- 14 files changed, 132 insertions(+), 191 deletions(-) rename normalized-cache-sqlite-incubating/src/{jvmTest => commonTest}/kotlin/com/apollographql/cache/normalized/sql/TrimTest.kt (71%) delete mode 100644 normalized-cache-sqlite-incubating/src/jvmMain/kotlin/com/apollographql/cache/normalized/sql/TrimmableNormalizedCacheFactory.kt diff --git a/normalized-cache-incubating/api/normalized-cache-incubating.api b/normalized-cache-incubating/api/normalized-cache-incubating.api index 4e6d4260..812e3242 100644 --- a/normalized-cache-incubating/api/normalized-cache-incubating.api +++ b/normalized-cache-incubating/api/normalized-cache-incubating.api @@ -449,12 +449,18 @@ public abstract interface class com/apollographql/cache/normalized/api/Normalize public abstract fun remove (Lcom/apollographql/cache/normalized/api/CacheKey;Z)Z public abstract fun remove (Ljava/lang/String;)I public abstract fun remove (Ljava/util/Collection;Z)I + public abstract fun trim (JF)J } public final class com/apollographql/cache/normalized/api/NormalizedCache$Companion { public final fun prettifyDump (Ljava/util/Map;)Ljava/lang/String; } +public final class com/apollographql/cache/normalized/api/NormalizedCache$DefaultImpls { + public static fun trim (Lcom/apollographql/cache/normalized/api/NormalizedCache;JF)J + public static synthetic fun trim$default (Lcom/apollographql/cache/normalized/api/NormalizedCache;JFILjava/lang/Object;)J +} + public abstract class com/apollographql/cache/normalized/api/NormalizedCacheFactory { public fun ()V public abstract fun create ()Lcom/apollographql/cache/normalized/api/NormalizedCache; @@ -583,6 +589,7 @@ public final class com/apollographql/cache/normalized/memory/MemoryCache : com/a public fun remove (Lcom/apollographql/cache/normalized/api/CacheKey;Z)Z public fun remove (Ljava/lang/String;)I public fun remove (Ljava/util/Collection;Z)I + public fun trim (JF)J } public final class com/apollographql/cache/normalized/memory/MemoryCacheFactory : com/apollographql/cache/normalized/api/NormalizedCacheFactory { diff --git a/normalized-cache-incubating/api/normalized-cache-incubating.klib.api b/normalized-cache-incubating/api/normalized-cache-incubating.klib.api index d11ce69c..7e423ce2 100644 --- a/normalized-cache-incubating/api/normalized-cache-incubating.klib.api +++ b/normalized-cache-incubating/api/normalized-cache-incubating.klib.api @@ -60,6 +60,7 @@ abstract interface com.apollographql.cache.normalized.api/NormalizedCache : com. 
abstract fun remove(com.apollographql.cache.normalized.api/CacheKey, kotlin/Boolean): kotlin/Boolean // com.apollographql.cache.normalized.api/NormalizedCache.remove|remove(com.apollographql.cache.normalized.api.CacheKey;kotlin.Boolean){}[0] abstract fun remove(kotlin.collections/Collection, kotlin/Boolean): kotlin/Int // com.apollographql.cache.normalized.api/NormalizedCache.remove|remove(kotlin.collections.Collection;kotlin.Boolean){}[0] abstract fun remove(kotlin/String): kotlin/Int // com.apollographql.cache.normalized.api/NormalizedCache.remove|remove(kotlin.String){}[0] + open fun trim(kotlin/Long, kotlin/Float = ...): kotlin/Long // com.apollographql.cache.normalized.api/NormalizedCache.trim|trim(kotlin.Long;kotlin.Float){}[0] final object Companion { // com.apollographql.cache.normalized.api/NormalizedCache.Companion|null[0] final fun prettifyDump(kotlin.collections/Map, kotlin.collections/Map>): kotlin/String // com.apollographql.cache.normalized.api/NormalizedCache.Companion.prettifyDump|prettifyDump(kotlin.collections.Map,kotlin.collections.Map>){}[0] @@ -403,6 +404,7 @@ final class com.apollographql.cache.normalized.memory/MemoryCache : com.apollogr final fun remove(com.apollographql.cache.normalized.api/CacheKey, kotlin/Boolean): kotlin/Boolean // com.apollographql.cache.normalized.memory/MemoryCache.remove|remove(com.apollographql.cache.normalized.api.CacheKey;kotlin.Boolean){}[0] final fun remove(kotlin.collections/Collection, kotlin/Boolean): kotlin/Int // com.apollographql.cache.normalized.memory/MemoryCache.remove|remove(kotlin.collections.Collection;kotlin.Boolean){}[0] final fun remove(kotlin/String): kotlin/Int // com.apollographql.cache.normalized.memory/MemoryCache.remove|remove(kotlin.String){}[0] + final fun trim(kotlin/Long, kotlin/Float): kotlin/Long // com.apollographql.cache.normalized.memory/MemoryCache.trim|trim(kotlin.Long;kotlin.Float){}[0] } final class com.apollographql.cache.normalized.memory/MemoryCacheFactory : com.apollographql.cache.normalized.api/NormalizedCacheFactory { // com.apollographql.cache.normalized.memory/MemoryCacheFactory|null[0] diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/NormalizedCache.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/NormalizedCache.kt index f2f79d28..7a25629d 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/NormalizedCache.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/NormalizedCache.kt @@ -84,6 +84,21 @@ interface NormalizedCache : ReadOnlyNormalizedCache { */ fun remove(pattern: String): Int + /** + * Trims the cache if its size exceeds [maxSizeBytes]. The amount of data to remove is determined by [trimFactor]. + * The oldest data is removed according to [ApolloCacheHeaders.RECEIVED_DATE]. + * + * Optional operation. + * + * @param maxSizeBytes the size of the cache in bytes above which the cache should be trimmed. + * @param trimFactor the factor of the cache size to trim. + * @return the cache size in bytes after trimming or -1 if the operation is not supported. 
+ * + * @see com.apollographql.cache.normalized.storeReceiveDate + */ + fun trim(maxSizeBytes: Long, trimFactor: Float = 0.1f): Long { + return -1 + } companion object { @JvmStatic diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/memory/MemoryCache.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/memory/MemoryCache.kt index f7f20f5e..2df66ef7 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/memory/MemoryCache.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/memory/MemoryCache.kt @@ -163,6 +163,14 @@ class MemoryCache( internal fun clearCurrentCache() { lruCache.clear() } + + override fun trim(maxSizeBytes: Long, trimFactor: Float): Long { + return if (nextCache == null) { + -1 + } else { + lockWrite { nextCache.trim(maxSizeBytes, trimFactor) } + } + } } class MemoryCacheFactory @JvmOverloads constructor( diff --git a/normalized-cache-sqlite-incubating/api/android/normalized-cache-sqlite-incubating.api b/normalized-cache-sqlite-incubating/api/android/normalized-cache-sqlite-incubating.api index cd21ed2e..e0416e4c 100644 --- a/normalized-cache-sqlite-incubating/api/android/normalized-cache-sqlite-incubating.api +++ b/normalized-cache-sqlite-incubating/api/android/normalized-cache-sqlite-incubating.api @@ -19,6 +19,7 @@ public final class com/apollographql/cache/normalized/sql/SqlNormalizedCache : c public fun remove (Lcom/apollographql/cache/normalized/api/CacheKey;Z)Z public fun remove (Ljava/lang/String;)I public fun remove (Ljava/util/Collection;Z)I + public fun trim (JF)J } public final class com/apollographql/cache/normalized/sql/SqlNormalizedCacheFactory_androidKt { @@ -72,6 +73,7 @@ public final class com/apollographql/cache/normalized/sql/internal/fields/Fields public final class com/apollographql/cache/normalized/sql/internal/fields/FieldsQueries : app/cash/sqldelight/TransacterImpl { public fun (Lapp/cash/sqldelight/db/SqlDriver;)V public final fun changes ()Lapp/cash/sqldelight/ExecutableQuery; + public final fun count ()Lapp/cash/sqldelight/Query; public final fun deleteAllRecords ()V public final fun deleteRecords (Ljava/util/Collection;)V public final fun deleteRecordsMatching (Ljava/lang/String;)V diff --git a/normalized-cache-sqlite-incubating/api/jvm/normalized-cache-sqlite-incubating.api b/normalized-cache-sqlite-incubating/api/jvm/normalized-cache-sqlite-incubating.api index a2269598..b2e89c69 100644 --- a/normalized-cache-sqlite-incubating/api/jvm/normalized-cache-sqlite-incubating.api +++ b/normalized-cache-sqlite-incubating/api/jvm/normalized-cache-sqlite-incubating.api @@ -8,6 +8,7 @@ public final class com/apollographql/cache/normalized/sql/SqlNormalizedCache : c public fun remove (Lcom/apollographql/cache/normalized/api/CacheKey;Z)Z public fun remove (Ljava/lang/String;)I public fun remove (Ljava/util/Collection;Z)I + public fun trim (JF)J } public final class com/apollographql/cache/normalized/sql/SqlNormalizedCacheFactory_jvmKt { @@ -19,10 +20,6 @@ public final class com/apollographql/cache/normalized/sql/SqlNormalizedCacheFact public static synthetic fun SqlNormalizedCacheFactory$default (Ljava/lang/String;Ljava/util/Properties;ILjava/lang/Object;)Lcom/apollographql/cache/normalized/api/NormalizedCacheFactory; } -public final class com/apollographql/cache/normalized/sql/TrimmableNormalizedCacheFactory : com/apollographql/cache/normalized/api/NormalizedCacheFactory { - public fun 
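// Hedged usage sketch for the trim() API introduced above. `expireCacheIfTooLarge` is a
// hypothetical helper; store.accessCache { } is the accessor already used by the tests in
// this repository, and the 10 MiB / 0.1f values are arbitrary.
fun expireCacheIfTooLarge(store: ApolloStore): Long {
  return store.accessCache { cache ->
    // MemoryCache forwards trim() to its chained cache (typically the SQL cache) and
    // returns -1 when it has no next cache; SqlNormalizedCache deletes the oldest fields
    // by received date and returns the database size after VACUUM.
    cache.trim(maxSizeBytes = 10L * 1024 * 1024, trimFactor = 0.1f)
  }
}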
create ()Lcom/apollographql/cache/normalized/api/NormalizedCache; -} - public final class com/apollographql/cache/normalized/sql/VersionKt { public static final field VERSION Ljava/lang/String; } @@ -61,6 +58,7 @@ public final class com/apollographql/cache/normalized/sql/internal/fields/Fields public final class com/apollographql/cache/normalized/sql/internal/fields/FieldsQueries : app/cash/sqldelight/TransacterImpl { public fun (Lapp/cash/sqldelight/db/SqlDriver;)V public final fun changes ()Lapp/cash/sqldelight/ExecutableQuery; + public final fun count ()Lapp/cash/sqldelight/Query; public final fun deleteAllRecords ()V public final fun deleteRecords (Ljava/util/Collection;)V public final fun deleteRecordsMatching (Ljava/lang/String;)V diff --git a/normalized-cache-sqlite-incubating/api/normalized-cache-sqlite-incubating.klib.api b/normalized-cache-sqlite-incubating/api/normalized-cache-sqlite-incubating.klib.api index 25be1d66..cb53b0ff 100644 --- a/normalized-cache-sqlite-incubating/api/normalized-cache-sqlite-incubating.klib.api +++ b/normalized-cache-sqlite-incubating/api/normalized-cache-sqlite-incubating.klib.api @@ -6,133 +6,60 @@ // - Show declarations: true // Library unique name: -abstract interface com.apollographql.cache.normalized.sql.internal.blob/BlobDatabase : app.cash.sqldelight/Transacter { // com.apollographql.cache.normalized.sql.internal.blob/BlobDatabase|null[0] - abstract val blobQueries // com.apollographql.cache.normalized.sql.internal.blob/BlobDatabase.blobQueries|{}blobQueries[0] - abstract fun (): com.apollographql.cache.normalized.sql.internal.blob/BlobQueries // com.apollographql.cache.normalized.sql.internal.blob/BlobDatabase.blobQueries.|(){}[0] +abstract interface com.apollographql.cache.normalized.sql.internal.fields/FieldsDatabase : app.cash.sqldelight/Transacter { // com.apollographql.cache.normalized.sql.internal.fields/FieldsDatabase|null[0] + abstract val fieldsQueries // com.apollographql.cache.normalized.sql.internal.fields/FieldsDatabase.fieldsQueries|{}fieldsQueries[0] + abstract fun (): com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries // com.apollographql.cache.normalized.sql.internal.fields/FieldsDatabase.fieldsQueries.|(){}[0] - final object Companion { // com.apollographql.cache.normalized.sql.internal.blob/BlobDatabase.Companion|null[0] - final val Schema // com.apollographql.cache.normalized.sql.internal.blob/BlobDatabase.Companion.Schema|{}Schema[0] - final fun (): app.cash.sqldelight.db/SqlSchema> // com.apollographql.cache.normalized.sql.internal.blob/BlobDatabase.Companion.Schema.|(){}[0] + final object Companion { // com.apollographql.cache.normalized.sql.internal.fields/FieldsDatabase.Companion|null[0] + final val Schema // com.apollographql.cache.normalized.sql.internal.fields/FieldsDatabase.Companion.Schema|{}Schema[0] + final fun (): app.cash.sqldelight.db/SqlSchema> // com.apollographql.cache.normalized.sql.internal.fields/FieldsDatabase.Companion.Schema.|(){}[0] - final fun invoke(app.cash.sqldelight.db/SqlDriver): com.apollographql.cache.normalized.sql.internal.blob/BlobDatabase // com.apollographql.cache.normalized.sql.internal.blob/BlobDatabase.Companion.invoke|invoke(app.cash.sqldelight.db.SqlDriver){}[0] + final fun invoke(app.cash.sqldelight.db/SqlDriver): com.apollographql.cache.normalized.sql.internal.fields/FieldsDatabase // com.apollographql.cache.normalized.sql.internal.fields/FieldsDatabase.Companion.invoke|invoke(app.cash.sqldelight.db.SqlDriver){}[0] } } -abstract interface 
com.apollographql.cache.normalized.sql.internal.blob2/Blob2Database : app.cash.sqldelight/Transacter { // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Database|null[0] - abstract val blob2Queries // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Database.blob2Queries|{}blob2Queries[0] - abstract fun (): com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Database.blob2Queries.|(){}[0] - - final object Companion { // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Database.Companion|null[0] - final val Schema // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Database.Companion.Schema|{}Schema[0] - final fun (): app.cash.sqldelight.db/SqlSchema> // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Database.Companion.Schema.|(){}[0] - - final fun invoke(app.cash.sqldelight.db/SqlDriver): com.apollographql.cache.normalized.sql.internal.blob2/Blob2Database // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Database.Companion.invoke|invoke(app.cash.sqldelight.db.SqlDriver){}[0] - } -} - -final class com.apollographql.cache.normalized.sql.internal.blob/BlobQueries : app.cash.sqldelight/TransacterImpl { // com.apollographql.cache.normalized.sql.internal.blob/BlobQueries|null[0] - constructor (app.cash.sqldelight.db/SqlDriver) // com.apollographql.cache.normalized.sql.internal.blob/BlobQueries.|(app.cash.sqldelight.db.SqlDriver){}[0] - - final fun <#A1: kotlin/Any> recordForKey(kotlin/String, kotlin/Function2): app.cash.sqldelight/Query<#A1> // com.apollographql.cache.normalized.sql.internal.blob/BlobQueries.recordForKey|recordForKey(kotlin.String;kotlin.Function2){0§}[0] - final fun <#A1: kotlin/Any> recordsForKeys(kotlin.collections/Collection, kotlin/Function2): app.cash.sqldelight/Query<#A1> // com.apollographql.cache.normalized.sql.internal.blob/BlobQueries.recordsForKeys|recordsForKeys(kotlin.collections.Collection;kotlin.Function2){0§}[0] - final fun <#A1: kotlin/Any> selectRecords(kotlin/Function2): app.cash.sqldelight/Query<#A1> // com.apollographql.cache.normalized.sql.internal.blob/BlobQueries.selectRecords|selectRecords(kotlin.Function2){0§}[0] - final fun changes(): app.cash.sqldelight/ExecutableQuery // com.apollographql.cache.normalized.sql.internal.blob/BlobQueries.changes|changes(){}[0] - final fun delete(kotlin/String) // com.apollographql.cache.normalized.sql.internal.blob/BlobQueries.delete|delete(kotlin.String){}[0] - final fun deleteAll() // com.apollographql.cache.normalized.sql.internal.blob/BlobQueries.deleteAll|deleteAll(){}[0] - final fun deleteRecords(kotlin.collections/Collection) // com.apollographql.cache.normalized.sql.internal.blob/BlobQueries.deleteRecords|deleteRecords(kotlin.collections.Collection){}[0] - final fun deleteRecordsWithKeyMatching(kotlin/String, kotlin/String) // com.apollographql.cache.normalized.sql.internal.blob/BlobQueries.deleteRecordsWithKeyMatching|deleteRecordsWithKeyMatching(kotlin.String;kotlin.String){}[0] - final fun insert(kotlin/String, kotlin/ByteArray) // com.apollographql.cache.normalized.sql.internal.blob/BlobQueries.insert|insert(kotlin.String;kotlin.ByteArray){}[0] - final fun recordForKey(kotlin/String): app.cash.sqldelight/Query // com.apollographql.cache.normalized.sql.internal.blob/BlobQueries.recordForKey|recordForKey(kotlin.String){}[0] - final fun recordsForKeys(kotlin.collections/Collection): app.cash.sqldelight/Query // 
com.apollographql.cache.normalized.sql.internal.blob/BlobQueries.recordsForKeys|recordsForKeys(kotlin.collections.Collection){}[0] - final fun selectRecords(): app.cash.sqldelight/Query // com.apollographql.cache.normalized.sql.internal.blob/BlobQueries.selectRecords|selectRecords(){}[0] - final fun update(kotlin/ByteArray, kotlin/String) // com.apollographql.cache.normalized.sql.internal.blob/BlobQueries.update|update(kotlin.ByteArray;kotlin.String){}[0] +final class com.apollographql.cache.normalized.sql.internal.fields/Field_ { // com.apollographql.cache.normalized.sql.internal.fields/Field_|null[0] + constructor (kotlin/String, kotlin/String, kotlin/ByteArray?, kotlin/ByteArray?, kotlin/Long?, kotlin/Long?) // com.apollographql.cache.normalized.sql.internal.fields/Field_.|(kotlin.String;kotlin.String;kotlin.ByteArray?;kotlin.ByteArray?;kotlin.Long?;kotlin.Long?){}[0] + + final val expiration_date // com.apollographql.cache.normalized.sql.internal.fields/Field_.expiration_date|{}expiration_date[0] + final fun (): kotlin/Long? // com.apollographql.cache.normalized.sql.internal.fields/Field_.expiration_date.|(){}[0] + final val field_ // com.apollographql.cache.normalized.sql.internal.fields/Field_.field_|{}field_[0] + final fun (): kotlin/String // com.apollographql.cache.normalized.sql.internal.fields/Field_.field_.|(){}[0] + final val key // com.apollographql.cache.normalized.sql.internal.fields/Field_.key|{}key[0] + final fun (): kotlin/String // com.apollographql.cache.normalized.sql.internal.fields/Field_.key.|(){}[0] + final val metadata // com.apollographql.cache.normalized.sql.internal.fields/Field_.metadata|{}metadata[0] + final fun (): kotlin/ByteArray? // com.apollographql.cache.normalized.sql.internal.fields/Field_.metadata.|(){}[0] + final val received_date // com.apollographql.cache.normalized.sql.internal.fields/Field_.received_date|{}received_date[0] + final fun (): kotlin/Long? // com.apollographql.cache.normalized.sql.internal.fields/Field_.received_date.|(){}[0] + final val value_ // com.apollographql.cache.normalized.sql.internal.fields/Field_.value_|{}value_[0] + final fun (): kotlin/ByteArray? // com.apollographql.cache.normalized.sql.internal.fields/Field_.value_.|(){}[0] + + final fun component1(): kotlin/String // com.apollographql.cache.normalized.sql.internal.fields/Field_.component1|component1(){}[0] + final fun component2(): kotlin/String // com.apollographql.cache.normalized.sql.internal.fields/Field_.component2|component2(){}[0] + final fun component3(): kotlin/ByteArray? // com.apollographql.cache.normalized.sql.internal.fields/Field_.component3|component3(){}[0] + final fun component4(): kotlin/ByteArray? // com.apollographql.cache.normalized.sql.internal.fields/Field_.component4|component4(){}[0] + final fun component5(): kotlin/Long? // com.apollographql.cache.normalized.sql.internal.fields/Field_.component5|component5(){}[0] + final fun component6(): kotlin/Long? // com.apollographql.cache.normalized.sql.internal.fields/Field_.component6|component6(){}[0] + final fun copy(kotlin/String = ..., kotlin/String = ..., kotlin/ByteArray? = ..., kotlin/ByteArray? = ..., kotlin/Long? = ..., kotlin/Long? 
= ...): com.apollographql.cache.normalized.sql.internal.fields/Field_ // com.apollographql.cache.normalized.sql.internal.fields/Field_.copy|copy(kotlin.String;kotlin.String;kotlin.ByteArray?;kotlin.ByteArray?;kotlin.Long?;kotlin.Long?){}[0] + final fun equals(kotlin/Any?): kotlin/Boolean // com.apollographql.cache.normalized.sql.internal.fields/Field_.equals|equals(kotlin.Any?){}[0] + final fun hashCode(): kotlin/Int // com.apollographql.cache.normalized.sql.internal.fields/Field_.hashCode|hashCode(){}[0] + final fun toString(): kotlin/String // com.apollographql.cache.normalized.sql.internal.fields/Field_.toString|toString(){}[0] } -final class com.apollographql.cache.normalized.sql.internal.blob/Blobs { // com.apollographql.cache.normalized.sql.internal.blob/Blobs|null[0] - constructor (kotlin/String, kotlin/ByteArray) // com.apollographql.cache.normalized.sql.internal.blob/Blobs.|(kotlin.String;kotlin.ByteArray){}[0] - - final val blob // com.apollographql.cache.normalized.sql.internal.blob/Blobs.blob|{}blob[0] - final fun (): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.blob/Blobs.blob.|(){}[0] - final val key // com.apollographql.cache.normalized.sql.internal.blob/Blobs.key|{}key[0] - final fun (): kotlin/String // com.apollographql.cache.normalized.sql.internal.blob/Blobs.key.|(){}[0] - - final fun component1(): kotlin/String // com.apollographql.cache.normalized.sql.internal.blob/Blobs.component1|component1(){}[0] - final fun component2(): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.blob/Blobs.component2|component2(){}[0] - final fun copy(kotlin/String = ..., kotlin/ByteArray = ...): com.apollographql.cache.normalized.sql.internal.blob/Blobs // com.apollographql.cache.normalized.sql.internal.blob/Blobs.copy|copy(kotlin.String;kotlin.ByteArray){}[0] - final fun equals(kotlin/Any?): kotlin/Boolean // com.apollographql.cache.normalized.sql.internal.blob/Blobs.equals|equals(kotlin.Any?){}[0] - final fun hashCode(): kotlin/Int // com.apollographql.cache.normalized.sql.internal.blob/Blobs.hashCode|hashCode(){}[0] - final fun toString(): kotlin/String // com.apollographql.cache.normalized.sql.internal.blob/Blobs.toString|toString(){}[0] -} - -final class com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries : app.cash.sqldelight/TransacterImpl { // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries|null[0] - constructor (app.cash.sqldelight.db/SqlDriver) // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.|(app.cash.sqldelight.db.SqlDriver){}[0] - - final fun <#A1: kotlin/Any> recordForKey(kotlin/String, kotlin/Function2): app.cash.sqldelight/Query<#A1> // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.recordForKey|recordForKey(kotlin.String;kotlin.Function2){0§}[0] - final fun <#A1: kotlin/Any> recordsForKeys(kotlin.collections/Collection, kotlin/Function2): app.cash.sqldelight/Query<#A1> // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.recordsForKeys|recordsForKeys(kotlin.collections.Collection;kotlin.Function2){0§}[0] - final fun <#A1: kotlin/Any> selectRecords(kotlin/Function3): app.cash.sqldelight/Query<#A1> // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.selectRecords|selectRecords(kotlin.Function3){0§}[0] - final fun changes(): app.cash.sqldelight/ExecutableQuery // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.changes|changes(){}[0] - final fun count(): app.cash.sqldelight/Query // 
com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.count|count(){}[0] - final fun delete(kotlin/String) // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.delete|delete(kotlin.String){}[0] - final fun deleteAll() // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.deleteAll|deleteAll(){}[0] - final fun deleteRecords(kotlin.collections/Collection) // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.deleteRecords|deleteRecords(kotlin.collections.Collection){}[0] - final fun deleteRecordsWithKeyMatching(kotlin/String, kotlin/String) // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.deleteRecordsWithKeyMatching|deleteRecordsWithKeyMatching(kotlin.String;kotlin.String){}[0] - final fun insert(kotlin/String, kotlin/ByteArray, kotlin/Long?) // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.insert|insert(kotlin.String;kotlin.ByteArray;kotlin.Long?){}[0] - final fun recordForKey(kotlin/String): app.cash.sqldelight/Query // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.recordForKey|recordForKey(kotlin.String){}[0] - final fun recordsForKeys(kotlin.collections/Collection): app.cash.sqldelight/Query // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.recordsForKeys|recordsForKeys(kotlin.collections.Collection){}[0] - final fun selectRecords(): app.cash.sqldelight/Query // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.selectRecords|selectRecords(){}[0] - final fun trim(kotlin/Long) // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.trim|trim(kotlin.Long){}[0] - final fun update(kotlin/ByteArray, kotlin/Long?, kotlin/String) // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.update|update(kotlin.ByteArray;kotlin.Long?;kotlin.String){}[0] -} - -final class com.apollographql.cache.normalized.sql.internal.blob2/Blobs { // com.apollographql.cache.normalized.sql.internal.blob2/Blobs|null[0] - constructor (kotlin/String, kotlin/ByteArray, kotlin/Long?) // com.apollographql.cache.normalized.sql.internal.blob2/Blobs.|(kotlin.String;kotlin.ByteArray;kotlin.Long?){}[0] - - final val blob // com.apollographql.cache.normalized.sql.internal.blob2/Blobs.blob|{}blob[0] - final fun (): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.blob2/Blobs.blob.|(){}[0] - final val date // com.apollographql.cache.normalized.sql.internal.blob2/Blobs.date|{}date[0] - final fun (): kotlin/Long? // com.apollographql.cache.normalized.sql.internal.blob2/Blobs.date.|(){}[0] - final val key // com.apollographql.cache.normalized.sql.internal.blob2/Blobs.key|{}key[0] - final fun (): kotlin/String // com.apollographql.cache.normalized.sql.internal.blob2/Blobs.key.|(){}[0] - - final fun component1(): kotlin/String // com.apollographql.cache.normalized.sql.internal.blob2/Blobs.component1|component1(){}[0] - final fun component2(): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.blob2/Blobs.component2|component2(){}[0] - final fun component3(): kotlin/Long? // com.apollographql.cache.normalized.sql.internal.blob2/Blobs.component3|component3(){}[0] - final fun copy(kotlin/String = ..., kotlin/ByteArray = ..., kotlin/Long? 
= ...): com.apollographql.cache.normalized.sql.internal.blob2/Blobs // com.apollographql.cache.normalized.sql.internal.blob2/Blobs.copy|copy(kotlin.String;kotlin.ByteArray;kotlin.Long?){}[0] - final fun equals(kotlin/Any?): kotlin/Boolean // com.apollographql.cache.normalized.sql.internal.blob2/Blobs.equals|equals(kotlin.Any?){}[0] - final fun hashCode(): kotlin/Int // com.apollographql.cache.normalized.sql.internal.blob2/Blobs.hashCode|hashCode(){}[0] - final fun toString(): kotlin/String // com.apollographql.cache.normalized.sql.internal.blob2/Blobs.toString|toString(){}[0] -} - -final class com.apollographql.cache.normalized.sql.internal.blob2/RecordForKey { // com.apollographql.cache.normalized.sql.internal.blob2/RecordForKey|null[0] - constructor (kotlin/String, kotlin/ByteArray) // com.apollographql.cache.normalized.sql.internal.blob2/RecordForKey.|(kotlin.String;kotlin.ByteArray){}[0] - - final val blob // com.apollographql.cache.normalized.sql.internal.blob2/RecordForKey.blob|{}blob[0] - final fun (): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.blob2/RecordForKey.blob.|(){}[0] - final val key // com.apollographql.cache.normalized.sql.internal.blob2/RecordForKey.key|{}key[0] - final fun (): kotlin/String // com.apollographql.cache.normalized.sql.internal.blob2/RecordForKey.key.|(){}[0] - - final fun component1(): kotlin/String // com.apollographql.cache.normalized.sql.internal.blob2/RecordForKey.component1|component1(){}[0] - final fun component2(): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.blob2/RecordForKey.component2|component2(){}[0] - final fun copy(kotlin/String = ..., kotlin/ByteArray = ...): com.apollographql.cache.normalized.sql.internal.blob2/RecordForKey // com.apollographql.cache.normalized.sql.internal.blob2/RecordForKey.copy|copy(kotlin.String;kotlin.ByteArray){}[0] - final fun equals(kotlin/Any?): kotlin/Boolean // com.apollographql.cache.normalized.sql.internal.blob2/RecordForKey.equals|equals(kotlin.Any?){}[0] - final fun hashCode(): kotlin/Int // com.apollographql.cache.normalized.sql.internal.blob2/RecordForKey.hashCode|hashCode(){}[0] - final fun toString(): kotlin/String // com.apollographql.cache.normalized.sql.internal.blob2/RecordForKey.toString|toString(){}[0] -} - -final class com.apollographql.cache.normalized.sql.internal.blob2/RecordsForKeys { // com.apollographql.cache.normalized.sql.internal.blob2/RecordsForKeys|null[0] - constructor (kotlin/String, kotlin/ByteArray) // com.apollographql.cache.normalized.sql.internal.blob2/RecordsForKeys.|(kotlin.String;kotlin.ByteArray){}[0] - - final val blob // com.apollographql.cache.normalized.sql.internal.blob2/RecordsForKeys.blob|{}blob[0] - final fun (): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.blob2/RecordsForKeys.blob.|(){}[0] - final val key // com.apollographql.cache.normalized.sql.internal.blob2/RecordsForKeys.key|{}key[0] - final fun (): kotlin/String // com.apollographql.cache.normalized.sql.internal.blob2/RecordsForKeys.key.|(){}[0] - - final fun component1(): kotlin/String // com.apollographql.cache.normalized.sql.internal.blob2/RecordsForKeys.component1|component1(){}[0] - final fun component2(): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.blob2/RecordsForKeys.component2|component2(){}[0] - final fun copy(kotlin/String = ..., kotlin/ByteArray = ...): com.apollographql.cache.normalized.sql.internal.blob2/RecordsForKeys // 
com.apollographql.cache.normalized.sql.internal.blob2/RecordsForKeys.copy|copy(kotlin.String;kotlin.ByteArray){}[0] - final fun equals(kotlin/Any?): kotlin/Boolean // com.apollographql.cache.normalized.sql.internal.blob2/RecordsForKeys.equals|equals(kotlin.Any?){}[0] - final fun hashCode(): kotlin/Int // com.apollographql.cache.normalized.sql.internal.blob2/RecordsForKeys.hashCode|hashCode(){}[0] - final fun toString(): kotlin/String // com.apollographql.cache.normalized.sql.internal.blob2/RecordsForKeys.toString|toString(){}[0] +final class com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries : app.cash.sqldelight/TransacterImpl { // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries|null[0] + constructor (app.cash.sqldelight.db/SqlDriver) // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.|(app.cash.sqldelight.db.SqlDriver){}[0] + + final fun <#A1: kotlin/Any> selectAllRecords(kotlin/Function6): app.cash.sqldelight/Query<#A1> // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.selectAllRecords|selectAllRecords(kotlin.Function6){0§}[0] + final fun <#A1: kotlin/Any> selectRecords(kotlin.collections/Collection, kotlin/Function6): app.cash.sqldelight/Query<#A1> // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.selectRecords|selectRecords(kotlin.collections.Collection;kotlin.Function6){0§}[0] + final fun changes(): app.cash.sqldelight/ExecutableQuery // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.changes|changes(){}[0] + final fun count(): app.cash.sqldelight/Query // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.count|count(){}[0] + final fun deleteAllRecords() // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.deleteAllRecords|deleteAllRecords(){}[0] + final fun deleteRecords(kotlin.collections/Collection) // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.deleteRecords|deleteRecords(kotlin.collections.Collection){}[0] + final fun deleteRecordsMatching(kotlin/String) // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.deleteRecordsMatching|deleteRecordsMatching(kotlin.String){}[0] + final fun insertOrUpdateField(kotlin/String, kotlin/String, kotlin/ByteArray?, kotlin/ByteArray?, kotlin/Long?, kotlin/Long?) 
// com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.insertOrUpdateField|insertOrUpdateField(kotlin.String;kotlin.String;kotlin.ByteArray?;kotlin.ByteArray?;kotlin.Long?;kotlin.Long?){}[0] + final fun selectAllRecords(): app.cash.sqldelight/Query // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.selectAllRecords|selectAllRecords(){}[0] + final fun selectRecords(kotlin.collections/Collection): app.cash.sqldelight/Query // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.selectRecords|selectRecords(kotlin.collections.Collection){}[0] + final fun trimByReceivedDate(kotlin/Long) // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.trimByReceivedDate|trimByReceivedDate(kotlin.Long){}[0] } final class com.apollographql.cache.normalized.sql/SqlNormalizedCache : com.apollographql.cache.normalized.api/NormalizedCache { // com.apollographql.cache.normalized.sql/SqlNormalizedCache|null[0] @@ -145,6 +72,7 @@ final class com.apollographql.cache.normalized.sql/SqlNormalizedCache : com.apol final fun remove(com.apollographql.cache.normalized.api/CacheKey, kotlin/Boolean): kotlin/Boolean // com.apollographql.cache.normalized.sql/SqlNormalizedCache.remove|remove(com.apollographql.cache.normalized.api.CacheKey;kotlin.Boolean){}[0] final fun remove(kotlin.collections/Collection, kotlin/Boolean): kotlin/Int // com.apollographql.cache.normalized.sql/SqlNormalizedCache.remove|remove(kotlin.collections.Collection;kotlin.Boolean){}[0] final fun remove(kotlin/String): kotlin/Int // com.apollographql.cache.normalized.sql/SqlNormalizedCache.remove|remove(kotlin.String){}[0] + final fun trim(kotlin/Long, kotlin/Float): kotlin/Long // com.apollographql.cache.normalized.sql/SqlNormalizedCache.trim|trim(kotlin.Long;kotlin.Float){}[0] } final const val com.apollographql.cache.normalized.sql/VERSION // com.apollographql.cache.normalized.sql/VERSION|{}VERSION[0] diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCache.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCache.kt index ae0ca695..a93b159f 100644 --- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCache.kt +++ b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCache.kt @@ -148,4 +148,16 @@ class SqlNormalizedCache internal constructor( recordDatabase.selectRecords(chunkedKeys) } } + + override fun trim(maxSizeBytes: Long, trimFactor: Float): Long { + val size = recordDatabase.databaseSize() + return if (size >= maxSizeBytes) { + val count = recordDatabase.count().executeAsOne() + recordDatabase.trimByReceivedDate((count * trimFactor).toLong()) + recordDatabase.vacuum() + recordDatabase.databaseSize() + } else { + size + } + } } diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordDatabase.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordDatabase.kt index 467a4121..8aff37f1 100644 --- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordDatabase.kt +++ b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordDatabase.kt @@ -1,14 +1,20 @@ package 
com.apollographql.cache.normalized.sql.internal +import app.cash.sqldelight.Query +import app.cash.sqldelight.db.QueryResult +import app.cash.sqldelight.db.SqlDriver import com.apollographql.apollo.api.json.ApolloJsonElement import com.apollographql.cache.normalized.api.ApolloCacheHeaders import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.api.expirationDate import com.apollographql.cache.normalized.api.receivedDate import com.apollographql.cache.normalized.sql.internal.fields.Field_ +import com.apollographql.cache.normalized.sql.internal.fields.FieldsDatabase import com.apollographql.cache.normalized.sql.internal.fields.FieldsQueries -internal class RecordDatabase(private val fieldsQueries: FieldsQueries) { +internal class RecordDatabase(private val driver: SqlDriver) { + private val fieldsQueries: FieldsQueries = FieldsDatabase(driver).fieldsQueries + fun transaction(body: () -> T): T { return fieldsQueries.transactionWithResult { body() @@ -107,8 +113,23 @@ internal class RecordDatabase(private val fieldsQueries: FieldsQueries) { fieldsQueries.deleteAllRecords() } - fun trimByReceivedDate(limit: Int) { - fieldsQueries.trimByReceivedDate(limit.toLong()) + fun databaseSize(): Long { + return driver.executeQuery(null, "SELECT page_count * page_size FROM pragma_page_count(), pragma_page_size();", { + it.next() + QueryResult.Value(it.getLong(0)!!) + }, 0).value + } + + fun count(): Query { + return fieldsQueries.count() + } + + fun trimByReceivedDate(limit: Long) { + fieldsQueries.trimByReceivedDate(limit) + } + + fun vacuum() { + driver.execute(null, "VACUUM", 0) } fun changes(): Long { diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/factoryHelpers.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/factoryHelpers.kt index 5e279da7..68346b05 100644 --- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/factoryHelpers.kt +++ b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/factoryHelpers.kt @@ -35,7 +35,7 @@ internal fun createRecordDatabase(driver: SqlDriver): RecordDatabase { "Apollo: Cannot find the '$expectedTableName' table (found '$tableNames' instead)" } - return RecordDatabase(FieldsDatabase(driver).fieldsQueries) + return RecordDatabase(driver) } internal fun getSchema(): SqlSchema> = FieldsDatabase.Schema diff --git a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/apollographql/cache/normalized/sql/internal/fields/fields.sq b/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/apollographql/cache/normalized/sql/internal/fields/fields.sq index dbb55b69..246a526a 100644 --- a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/apollographql/cache/normalized/sql/internal/fields/fields.sq +++ b/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/apollographql/cache/normalized/sql/internal/fields/fields.sq @@ -27,6 +27,9 @@ DELETE FROM field WHERE key LIKE ? 
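// Worked example (illustrative numbers only) of the trim() implementation above: the size
// check uses page_count * page_size, and the number of field rows to delete is
// count * trimFactor, oldest received_date first, followed by VACUUM.
fun fieldsToTrim(fieldCount: Long, trimFactor: Float = 0.1f): Long = (fieldCount * trimFactor).toLong()

fun main() {
  // A 12 MiB database with maxSizeBytes = 10 MiB exceeds the limit, so with 10_000 stored
  // fields and the default trimFactor of 0.1f the 1_000 oldest fields are deleted.
  check(fieldsToTrim(10_000) == 1_000L)
}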
ESCAPE '\'; deleteAllRecords: DELETE FROM field; +count: +SELECT count(*) FROM field; + trimByReceivedDate: DELETE FROM field WHERE (key, field) IN ( diff --git a/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCacheTest.kt b/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCacheTest.kt index 56f42034..bc536c76 100644 --- a/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCacheTest.kt +++ b/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCacheTest.kt @@ -14,7 +14,6 @@ import com.apollographql.cache.normalized.api.DefaultRecordMerger import com.apollographql.cache.normalized.api.NormalizedCache import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.sql.internal.RecordDatabase -import com.apollographql.cache.normalized.sql.internal.fields.FieldsQueries import kotlin.test.BeforeTest import kotlin.test.Test import kotlin.test.assertEquals @@ -198,7 +197,7 @@ class SqlNormalizedCacheTest { @Test fun exceptionCallsExceptionHandler() { - val badCache = SqlNormalizedCache(RecordDatabase(FieldsQueries(BadDriver))) + val badCache = SqlNormalizedCache(RecordDatabase(BadDriver)) var throwable: Throwable? = null apolloExceptionHandler = { throwable = it diff --git a/normalized-cache-sqlite-incubating/src/jvmTest/kotlin/com/apollographql/cache/normalized/sql/TrimTest.kt b/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/TrimTest.kt similarity index 71% rename from normalized-cache-sqlite-incubating/src/jvmTest/kotlin/com/apollographql/cache/normalized/sql/TrimTest.kt rename to normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/TrimTest.kt index 970f7e54..0e1135d3 100644 --- a/normalized-cache-sqlite-incubating/src/jvmTest/kotlin/com/apollographql/cache/normalized/sql/TrimTest.kt +++ b/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/TrimTest.kt @@ -4,21 +4,14 @@ import com.apollographql.cache.normalized.api.CacheHeaders import com.apollographql.cache.normalized.api.DefaultRecordMerger import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.api.withDates -import org.junit.Test -import java.io.File +import kotlin.test.Test import kotlin.test.assertEquals import kotlin.test.assertNull class TrimTest { @Test fun trimTest() { - val dbName = "build/test.db" - val dbUrl = "jdbc:sqlite:$dbName" - val dbFile = File(dbName) - - dbFile.delete() - - val cache = TrimmableNormalizedCacheFactory(dbUrl).create() + val cache = SqlNormalizedCacheFactory().create().also { it.clearAll() } val largeString = "".padStart(1024, '?') @@ -40,13 +33,14 @@ class TrimTest { } cache.merge(newRecords, CacheHeaders.NONE, recordMerger = DefaultRecordMerger) - assertEquals(9596928, dbFile.length()) + val sizeBeforeTrim = cache.trim(-1) + assertEquals(8515584, sizeBeforeTrim) // Trim the cache by 10% - val trimmedCache = TrimmableNormalizedCacheFactory(dbUrl, 9596928, 0.1f).create() + val sizeAfterTrim = cache.trim(8515584, 0.1f) - assertEquals(8548352, dbFile.length()) + assertEquals(7667712, sizeAfterTrim) // The oldest key must have been removed - assertNull(trimmedCache.loadRecord("old", CacheHeaders.NONE)) + assertNull(cache.loadRecord("old", CacheHeaders.NONE)) } } diff --git 
a/normalized-cache-sqlite-incubating/src/jvmMain/kotlin/com/apollographql/cache/normalized/sql/TrimmableNormalizedCacheFactory.kt b/normalized-cache-sqlite-incubating/src/jvmMain/kotlin/com/apollographql/cache/normalized/sql/TrimmableNormalizedCacheFactory.kt deleted file mode 100644 index 10936e1a..00000000 --- a/normalized-cache-sqlite-incubating/src/jvmMain/kotlin/com/apollographql/cache/normalized/sql/TrimmableNormalizedCacheFactory.kt +++ /dev/null @@ -1,48 +0,0 @@ -package com.apollographql.cache.normalized.sql - -import app.cash.sqldelight.driver.jdbc.sqlite.JdbcSqliteDriver -import com.apollographql.cache.normalized.api.NormalizedCache -import com.apollographql.cache.normalized.api.NormalizedCacheFactory -import com.apollographql.cache.normalized.sql.internal.Blob2RecordDatabase -import com.apollographql.cache.normalized.sql.internal.blob2.Blob2Database -import com.apollographql.cache.normalized.sql.internal.maybeCreateOrMigrateSchema -import java.io.File - -/** - * Experimental database that supports trimming at startup - * - * There are no backward compatibilities, DO NOT ship in a production app - * - * @param url Database connection URL in the form of `jdbc:sqlite:path` where `path` is either blank - * @param maxSize if the size of the database is bigger than [maxSize] (in bytes), it will be trimmed - * @param trimFactor the amount of trimming to do - */ -class TrimmableNormalizedCacheFactory internal constructor( - private val url: String, - private val maxSize: Long? = null, - private val trimFactor: Float = 0.1f, -) : NormalizedCacheFactory() { - private val driver = JdbcSqliteDriver(url) - - override fun create(): NormalizedCache { - maybeCreateOrMigrateSchema(driver, Blob2Database.Schema) - - val database = Blob2Database(driver) - val queries = database.blob2Queries - if (maxSize != null) { - val path = url.substringAfter("jdbc:sqlite:") - if (path.isNotBlank()) { - val size = File(path).length() - if (size >= maxSize) { - val count = queries.count().executeAsOne() - queries.trim((count * trimFactor).toLong()) - driver.execute(null, "VACUUM", 0) - } - } - } - - return SqlNormalizedCache(Blob2RecordDatabase(queries)) - } -} - - From 9aff3c932e6330640fa0c590e0bc22c4a1baa0f0 Mon Sep 17 00:00:00 2001 From: BoD Date: Mon, 10 Feb 2025 16:29:33 +0100 Subject: [PATCH 06/29] Rename 'key' -> 'record' --- .../normalized-cache-sqlite-incubating.api | 2 +- .../jvm/normalized-cache-sqlite-incubating.api | 2 +- ...normalized-cache-sqlite-incubating.klib.api | 4 ++-- .../normalized/sql/internal/RecordDatabase.kt | 10 +++++----- .../normalized/sql/internal/fields/fields.sq | 18 +++++++++--------- .../sqldelight/fields/com/migrations/1.sqm | 4 ++-- 6 files changed, 20 insertions(+), 20 deletions(-) diff --git a/normalized-cache-sqlite-incubating/api/android/normalized-cache-sqlite-incubating.api b/normalized-cache-sqlite-incubating/api/android/normalized-cache-sqlite-incubating.api index e0416e4c..7a155430 100644 --- a/normalized-cache-sqlite-incubating/api/android/normalized-cache-sqlite-incubating.api +++ b/normalized-cache-sqlite-incubating/api/android/normalized-cache-sqlite-incubating.api @@ -52,9 +52,9 @@ public final class com/apollographql/cache/normalized/sql/internal/fields/Field_ public fun equals (Ljava/lang/Object;)Z public final fun getExpiration_date ()Ljava/lang/Long; public final fun getField_ ()Ljava/lang/String; - public final fun getKey ()Ljava/lang/String; public final fun getMetadata ()[B public final fun getReceived_date ()Ljava/lang/Long; + public final fun getRecord 
()Ljava/lang/String; public final fun getValue_ ()[B public fun hashCode ()I public fun toString ()Ljava/lang/String; diff --git a/normalized-cache-sqlite-incubating/api/jvm/normalized-cache-sqlite-incubating.api b/normalized-cache-sqlite-incubating/api/jvm/normalized-cache-sqlite-incubating.api index b2e89c69..18897513 100644 --- a/normalized-cache-sqlite-incubating/api/jvm/normalized-cache-sqlite-incubating.api +++ b/normalized-cache-sqlite-incubating/api/jvm/normalized-cache-sqlite-incubating.api @@ -37,9 +37,9 @@ public final class com/apollographql/cache/normalized/sql/internal/fields/Field_ public fun equals (Ljava/lang/Object;)Z public final fun getExpiration_date ()Ljava/lang/Long; public final fun getField_ ()Ljava/lang/String; - public final fun getKey ()Ljava/lang/String; public final fun getMetadata ()[B public final fun getReceived_date ()Ljava/lang/Long; + public final fun getRecord ()Ljava/lang/String; public final fun getValue_ ()[B public fun hashCode ()I public fun toString ()Ljava/lang/String; diff --git a/normalized-cache-sqlite-incubating/api/normalized-cache-sqlite-incubating.klib.api b/normalized-cache-sqlite-incubating/api/normalized-cache-sqlite-incubating.klib.api index cb53b0ff..ab99778a 100644 --- a/normalized-cache-sqlite-incubating/api/normalized-cache-sqlite-incubating.klib.api +++ b/normalized-cache-sqlite-incubating/api/normalized-cache-sqlite-incubating.klib.api @@ -25,12 +25,12 @@ final class com.apollographql.cache.normalized.sql.internal.fields/Field_ { // c final fun (): kotlin/Long? // com.apollographql.cache.normalized.sql.internal.fields/Field_.expiration_date.|(){}[0] final val field_ // com.apollographql.cache.normalized.sql.internal.fields/Field_.field_|{}field_[0] final fun (): kotlin/String // com.apollographql.cache.normalized.sql.internal.fields/Field_.field_.|(){}[0] - final val key // com.apollographql.cache.normalized.sql.internal.fields/Field_.key|{}key[0] - final fun (): kotlin/String // com.apollographql.cache.normalized.sql.internal.fields/Field_.key.|(){}[0] final val metadata // com.apollographql.cache.normalized.sql.internal.fields/Field_.metadata|{}metadata[0] final fun (): kotlin/ByteArray? // com.apollographql.cache.normalized.sql.internal.fields/Field_.metadata.|(){}[0] final val received_date // com.apollographql.cache.normalized.sql.internal.fields/Field_.received_date|{}received_date[0] final fun (): kotlin/Long? // com.apollographql.cache.normalized.sql.internal.fields/Field_.received_date.|(){}[0] + final val record // com.apollographql.cache.normalized.sql.internal.fields/Field_.record|{}record[0] + final fun (): kotlin/String // com.apollographql.cache.normalized.sql.internal.fields/Field_.record.|(){}[0] final val value_ // com.apollographql.cache.normalized.sql.internal.fields/Field_.value_|{}value_[0] final fun (): kotlin/ByteArray? 
// com.apollographql.cache.normalized.sql.internal.fields/Field_.value_.|(){}[0] diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordDatabase.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordDatabase.kt index 8aff37f1..91966076 100644 --- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordDatabase.kt +++ b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordDatabase.kt @@ -25,12 +25,12 @@ internal class RecordDatabase(private val driver: SqlDriver) { * @param keys the keys of the records to select, size must be <= 999 */ fun selectRecords(keys: Collection): List { - val fieldsByRecordKey: Map> = fieldsQueries.selectRecords(keys).executeAsList().groupBy { it.key } + val fieldsByRecordKey: Map> = fieldsQueries.selectRecords(keys).executeAsList().groupBy { it.record } return fieldsByRecordKey.toRecords() } fun selectAllRecords(): List { - val fieldsByRecordKey: Map> = fieldsQueries.selectAllRecords().executeAsList().groupBy { it.key } + val fieldsByRecordKey: Map> = fieldsQueries.selectAllRecords().executeAsList().groupBy { it.record } return fieldsByRecordKey.toRecords() } @@ -63,7 +63,7 @@ internal class RecordDatabase(private val driver: SqlDriver) { fun insertOrUpdateRecord(record: Record) { for ((field, value) in record.fields) { insertOrUpdateField( - key = record.key, + record = record.key, field = field, value = value, metadata = record.metadata[field], @@ -74,7 +74,7 @@ internal class RecordDatabase(private val driver: SqlDriver) { } private fun insertOrUpdateField( - key: String, + record: String, field: String, value: ApolloJsonElement, metadata: Map?, @@ -82,7 +82,7 @@ internal class RecordDatabase(private val driver: SqlDriver) { expirationDate: Long?, ) { fieldsQueries.insertOrUpdateField( - key = key, + record = record, field_ = field, value_ = ApolloJsonElementSerializer.serialize(value), metadata = metadata diff --git a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/apollographql/cache/normalized/sql/internal/fields/fields.sq b/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/apollographql/cache/normalized/sql/internal/fields/fields.sq index 246a526a..f33bcd5e 100644 --- a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/apollographql/cache/normalized/sql/internal/fields/fields.sq +++ b/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/apollographql/cache/normalized/sql/internal/fields/fields.sq @@ -1,28 +1,28 @@ CREATE TABLE field ( - key TEXT NOT NULL, + record TEXT NOT NULL, field TEXT NOT NULL, value BLOB, metadata BLOB, received_date INTEGER, expiration_date INTEGER, - PRIMARY KEY (key, field) ON CONFLICT REPLACE + PRIMARY KEY (record, field) ON CONFLICT REPLACE ) WITHOUT ROWID; selectRecords: -SELECT key, field, value, metadata, received_date, expiration_date FROM field WHERE key IN ?; +SELECT record, field, value, metadata, received_date, expiration_date FROM field WHERE record IN ?; selectAllRecords: -SELECT key, field, value, metadata, received_date, expiration_date FROM field; +SELECT record, field, value, metadata, received_date, expiration_date FROM field; insertOrUpdateField: -INSERT INTO field (key, field, value, metadata, received_date, expiration_date) VALUES (?, ?, ?, ?, ?, ?); +INSERT INTO field (record, field, 
value, metadata, received_date, expiration_date) VALUES (?, ?, ?, ?, ?, ?); deleteRecords: -DELETE FROM field WHERE key IN ?; +DELETE FROM field WHERE record IN ?; deleteRecordsMatching: -DELETE FROM field WHERE key LIKE ? ESCAPE '\'; +DELETE FROM field WHERE record LIKE ? ESCAPE '\'; deleteAllRecords: DELETE FROM field; @@ -32,8 +32,8 @@ SELECT count(*) FROM field; trimByReceivedDate: DELETE FROM field -WHERE (key, field) IN ( - SELECT key, field +WHERE (record, field) IN ( + SELECT record, field FROM field WHERE received_date IS NOT NULL ORDER BY received_date ASC diff --git a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/migrations/1.sqm b/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/migrations/1.sqm index 00f9cafe..efe1223b 100644 --- a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/migrations/1.sqm +++ b/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/migrations/1.sqm @@ -2,12 +2,12 @@ DROP TABLE IF EXISTS records; CREATE TABLE IF NOT EXISTS field ( - key TEXT NOT NULL, + record TEXT NOT NULL, field TEXT NOT NULL, value BLOB, metadata BLOB, received_date INTEGER, expiration_date INTEGER, - PRIMARY KEY (key, field) ON CONFLICT REPLACE + PRIMARY KEY (record, field) ON CONFLICT REPLACE ) WITHOUT ROWID; From ce594f12c7d07234f235a9efe4bff80705690af4 Mon Sep 17 00:00:00 2001 From: BoD Date: Mon, 17 Feb 2025 18:59:52 +0100 Subject: [PATCH 07/29] Minor ApolloJsonElementSerializer optimizations --- .../internal/ApolloJsonElementSerializer.kt | 161 ++++++++++++------ 1 file changed, 111 insertions(+), 50 deletions(-) diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/ApolloJsonElementSerializer.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/ApolloJsonElementSerializer.kt index 7f576480..dd3988df 100644 --- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/ApolloJsonElementSerializer.kt +++ b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/ApolloJsonElementSerializer.kt @@ -25,30 +25,66 @@ internal object ApolloJsonElementSerializer { } private fun Buffer.writeString(value: String) { - // TODO: special case for empty string, saves 4 bytes - writeInt(value.utf8Size().toInt()) // TODO: sizes should be unsigned + writeNumber(value.utf8Size()) writeUtf8(value) } private fun Buffer.readString(): String { - return readUtf8(readInt().toLong()) + return readUtf8(readNumber().toLong()) + } + + private fun Buffer.writeNumber(value: Number) { + when (value.toLong()) { + 0L -> { + writeByte(NUMBER_0) + } + + in Byte.MIN_VALUE..Byte.MAX_VALUE -> { + writeByte(NUMBER_BYTE) + writeByte(value.toInt()) + } + + in Short.MIN_VALUE..Short.MAX_VALUE -> { + writeByte(NUMBER_SHORT) + writeShort(value.toInt()) + } + + in Int.MIN_VALUE..Int.MAX_VALUE -> { + writeByte(NUMBER_INT) + writeInt(value.toInt()) + } + + else -> { + writeByte(NUMBER_LONG) + writeLong(value.toLong()) + } + } + } + + private fun Buffer.readNumber(): Number { + return when (val what = readByte().toInt()) { + NUMBER_0 -> 0 + NUMBER_BYTE -> readByte() + NUMBER_SHORT -> readShort() + NUMBER_INT -> readInt() + NUMBER_LONG -> readLong() + else -> error("Trying to read unsupported Number type: $what") + } } private fun Buffer.writeAny(value: ApolloJsonElement) { when (value) { is String -> { - 
buffer.writeByte(STRING) - buffer.writeString(value) - } - - is Int -> { - buffer.writeByte(INT) - buffer.writeInt(value) + if (value.isEmpty()) { + writeByte(EMPTY_STRING) + } else { + writeByte(STRING) + writeString(value) + } } - is Long -> { - buffer.writeByte(LONG) - buffer.writeLong(value) + is Int, is Long -> { + writeNumber(value) } is Double -> { @@ -62,8 +98,11 @@ internal object ApolloJsonElementSerializer { } is Boolean -> { - buffer.writeByte(BOOLEAN) // TODO: 1 byte for BOOLEAN_TRUE, 1 byte for BOOLEAN_FALSE - buffer.writeByte(if (value) 1 else 0) + if (value) { + buffer.writeByte(BOOLEAN_TRUE) + } else { + buffer.writeByte(BOOLEAN_FALSE) + } } is CacheKey -> { @@ -72,21 +111,29 @@ internal object ApolloJsonElementSerializer { } is List<*> -> { - buffer.writeByte(LIST) // TODO: special case for empty list, saves 4 bytes - buffer.writeInt(value.size) // TODO: sizes should be unsigned - value.forEach { - buffer.writeAny(it) + if (value.isEmpty()) { + buffer.writeByte(EMPTY_LIST) + } else { + buffer.writeByte(LIST) + buffer.writeNumber(value.size) + value.forEach { + buffer.writeAny(it) + } } } is Map<*, *> -> { - buffer.writeByte(MAP) // TODO: special case for empty map, saves 4 bytes - buffer.writeInt(value.size) // TODO: sizes should be unsigned - @Suppress("UNCHECKED_CAST") - value as Map - value.forEach { - buffer.writeString(it.key) - buffer.writeAny(it.value) + if (value.isEmpty()) { + buffer.writeByte(MAP_EMPTY) + } else { + buffer.writeByte(MAP) + buffer.writeNumber(value.size) + @Suppress("UNCHECKED_CAST") + value as Map + value.forEach { + buffer.writeString(it.key) + buffer.writeAny(it.value) + } } } @@ -97,12 +144,12 @@ internal object ApolloJsonElementSerializer { is Error -> { buffer.writeByte(ERROR) buffer.writeString(value.message) - buffer.writeInt(value.locations?.size ?: 0) + buffer.writeNumber(value.locations?.size ?: 0) for (location in value.locations.orEmpty()) { - buffer.writeInt(location.line) - buffer.writeInt(location.column) + buffer.writeNumber(location.line) + buffer.writeNumber(location.column) } - buffer.writeInt(value.path?.size ?: 0) + buffer.writeNumber(value.path?.size ?: 0) for (path in value.path.orEmpty()) { buffer.writeAny(path) } @@ -116,37 +163,44 @@ internal object ApolloJsonElementSerializer { private fun Buffer.readAny(): ApolloJsonElement { return when (val what = readByte().toInt()) { STRING -> readString() - INT -> readInt() - LONG -> readLong() + EMPTY_STRING -> "" + NUMBER_0 -> 0 + NUMBER_BYTE -> readByte().toInt() + NUMBER_SHORT -> readShort().toInt() + NUMBER_INT -> readInt() + NUMBER_LONG -> readLong() DOUBLE -> Double.fromBits(readLong()) JSON_NUMBER -> JsonNumber(readString()) - BOOLEAN -> readByte() > 0 + BOOLEAN_TRUE -> true + BOOLEAN_FALSE -> false CACHE_KEY -> { CacheKey(readString()) } LIST -> { - val size = readInt() + val size = readNumber().toInt() 0.until(size).map { readAny() } } + EMPTY_LIST -> emptyList() MAP -> { - val size = readInt() + val size = readNumber().toInt() 0.until(size).associate { readString() to readAny() } } + MAP_EMPTY -> emptyMap() NULL -> null ERROR -> { val message = readString() - val locations = 0.until(readInt()).map { - Error.Location(readInt(), readInt()) + val locations = 0.until(readNumber().toInt()).map { + Error.Location(readNumber().toInt(), readNumber().toInt()) } - val path = 0.until(readInt()).map { + val path = 0.until(readNumber().toInt()).map { readAny()!! 
} @@ -165,19 +219,26 @@ internal object ApolloJsonElementSerializer { .build() } - else -> error("Trying to read unsupported Record value: $what") + else -> error("Trying to read unsupported Record type: $what") } } - private const val STRING = 0 - private const val INT = 1 - private const val LONG = 2 // TODO replace INT and LONG by BYTE, UBYTE, SHORT, USHORT, UINT for smaller values - private const val BOOLEAN = 3 - private const val DOUBLE = 4 - private const val JSON_NUMBER = 5 - private const val LIST = 6 - private const val MAP = 7 - private const val CACHE_KEY = 8 - private const val NULL = 9 - private const val ERROR = 10 + private const val NULL = 0 + private const val STRING = 1 + private const val EMPTY_STRING = 2 + private const val NUMBER_0 = 3 + private const val NUMBER_BYTE = 4 + private const val NUMBER_SHORT = 5 + private const val NUMBER_INT = 6 + private const val NUMBER_LONG = 7 + private const val BOOLEAN_TRUE = 8 + private const val BOOLEAN_FALSE = 9 + private const val DOUBLE = 10 + private const val JSON_NUMBER = 11 + private const val LIST = 12 + private const val EMPTY_LIST = 13 + private const val MAP = 14 + private const val MAP_EMPTY = 15 + private const val CACHE_KEY = 16 + private const val ERROR = 17 } From ed1057e017f4b02b2d551e9f6790f187daf9c47e Mon Sep 17 00:00:00 2001 From: BoD Date: Wed, 19 Mar 2025 16:07:44 +0100 Subject: [PATCH 08/29] Do not call propagateErrors if there are no errors --- .../cache/normalized/api/DataWithErrors.kt | 16 ++++++++++++++++ .../normalized/internal/DefaultApolloStore.kt | 7 ++++++- 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/DataWithErrors.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/DataWithErrors.kt index 0bc9a7a9..0c76c196 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/DataWithErrors.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/DataWithErrors.kt @@ -186,3 +186,19 @@ private fun CompiledSelection.fieldSelection(responseName: String): CompiledFiel .build() } } + + +@Suppress("UNCHECKED_CAST") +internal fun Any?.hasErrors(): Boolean { + val queue = ArrayDeque() + queue.add(this) + while (queue.isNotEmpty()) { + val current = queue.removeFirst() + when (current) { + is Error -> return true + is Map<*, *> -> queue.addAll(current.values) + is List<*> -> queue.addAll(current) + } + } + return false +} diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/DefaultApolloStore.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/DefaultApolloStore.kt index 74cd4ab9..c2009e67 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/DefaultApolloStore.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/DefaultApolloStore.kt @@ -25,6 +25,7 @@ import com.apollographql.cache.normalized.api.NormalizedCache import com.apollographql.cache.normalized.api.NormalizedCacheFactory import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.api.RecordMerger +import com.apollographql.cache.normalized.api.hasErrors import com.apollographql.cache.normalized.api.propagateErrors import com.apollographql.cache.normalized.api.withErrors import 
com.apollographql.cache.normalized.cacheHeaders @@ -140,7 +141,11 @@ internal class DefaultApolloStore( @Suppress("UNCHECKED_CAST") val dataWithNulls: Map? = - propagateErrors(dataWithErrors, operation.rootField(), errors) as Map? + if (dataWithErrors.hasErrors()) { + propagateErrors(dataWithErrors, operation.rootField(), errors) + } else { + dataWithErrors + } as Map? val falseVariablesCustomScalarAdapter = customScalarAdapters.newBuilder() .falseVariables(variables.valueMap.filter { it.value == false }.keys) From ee20dca310d44d03eda6e3402e63c0f3a694f600 Mon Sep 17 00:00:00 2001 From: BoD Date: Wed, 19 Mar 2025 16:13:54 +0100 Subject: [PATCH 09/29] Minor optimization in toRecords() --- .../normalized/sql/internal/RecordDatabase.kt | 36 ++++++++++--------- 1 file changed, 20 insertions(+), 16 deletions(-) diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordDatabase.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordDatabase.kt index c24cd80a..fd5e5341 100644 --- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordDatabase.kt +++ b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordDatabase.kt @@ -37,26 +37,30 @@ internal class RecordDatabase(private val driver: SqlDriver) { private fun Map>.toRecords(): List = mapValues { (key, fieldList) -> - val fields: Map = - fieldList.associate { field -> field.field_ to ApolloJsonElementSerializer.deserialize(field.value_) } + val fieldValues: Map = fieldList.associate { field -> + field.field_ to ApolloJsonElementSerializer.deserialize(field.value_) + } @Suppress("UNCHECKED_CAST") - val metadata: Map> = - fieldList.associate { field -> - field.field_ to (ApolloJsonElementSerializer.deserialize(field.metadata) as Map?).orEmpty() + - buildMap { - // Dates are stored separately in their own columns - if (field.received_date != null) { - put(ApolloCacheHeaders.RECEIVED_DATE, field.received_date) - } - if (field.expiration_date != null) { - put(ApolloCacheHeaders.EXPIRATION_DATE, field.expiration_date) - } - } - }.filterValues { it.isNotEmpty() } + val metadata: Map> = fieldList.associate { field -> + val deserializedMetadata = ApolloJsonElementSerializer.deserialize(field.metadata) as Map? + field.field_ to LinkedHashMap((deserializedMetadata?.size ?: 0) + 2).also { + if (deserializedMetadata != null) { + it.putAll(deserializedMetadata) + } + + // Dates are stored separately in their own columns + if (field.received_date != null) { + it.put(ApolloCacheHeaders.RECEIVED_DATE, field.received_date) + } + if (field.expiration_date != null) { + it.put(ApolloCacheHeaders.EXPIRATION_DATE, field.expiration_date) + } + } + }.filterValues { it.isNotEmpty() } Record( key = CacheKey(key, isHashed = true), - fields = fields, + fields = fieldValues, metadata = metadata, ) }.values.toList() From 2dd369769fc901085ae4cdfad7e60a6110472d53 Mon Sep 17 00:00:00 2001 From: BoD Date: Fri, 21 Mar 2025 17:25:09 +0100 Subject: [PATCH 10/29] Use ByteString in CacheKey and blob in SQL. 
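
With this change CacheKey wraps an okio.ByteString instead of a String: plain string keys are hashed with SHA-256 and truncated to HASH_SIZE_BYTES = 10 bytes, and append() derives a child key by re-hashing the parent digest concatenated with the next path segment. A minimal standalone sketch of that scheme (illustration only; the names mirror the diff below, but the snippet itself is not part of it):

    import okio.Buffer
    import okio.ByteString
    import okio.ByteString.Companion.encodeUtf8

    private const val HASH_SIZE_BYTES = 10

    // SHA-256 digest truncated to the first 10 bytes, as in CacheKey.kt below.
    private fun ByteString.hashed(): ByteString = sha256().substring(endIndex = HASH_SIZE_BYTES)

    fun main() {
      // Record key for an object identified as "User:42".
      val user = "User:42".encodeUtf8().hashed()

      // Child key derived from the parent digest plus a segment,
      // mirroring CacheKey.append("friends").
      val friends = Buffer()
          .write(user)
          .write("friends".encodeUtf8())
          .readByteString()
          .hashed()

      println(user.hex())     // 10-byte key, printed as 20 hex characters
      println(friends.hex())
    }

Keys therefore have a fixed size regardless of the original identifier length, at the cost of no longer being human readable (keyToString() prints the hex form).
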
--- .../api/normalized-cache-incubating.api | 95 ++++++++++--------- .../api/normalized-cache-incubating.klib.api | 18 ++-- .../cache/normalized/ApolloStore.kt | 4 +- .../cache/normalized/api/CacheKey.kt | 55 +++++++---- .../cache/normalized/api/CacheResolver.kt | 6 +- .../cache/normalized/api/NormalizedCache.kt | 2 +- .../normalized/internal/CacheBatchReader.kt | 6 +- .../normalized/internal/DefaultApolloStore.kt | 6 +- .../cache/normalized/internal/Normalizer.kt | 38 +++----- .../normalized/internal/RecordWeigher.kt | 4 +- .../cache/normalized/memory/MemoryCache.kt | 2 +- .../cache/normalized/CacheKeyResolverTest.kt | 2 +- .../cache/normalized/MemoryCacheTest.kt | 4 +- .../cache/normalized/RecordWeigherTest.kt | 2 +- .../normalized-cache-sqlite-incubating.api | 17 ++-- .../normalized-cache-sqlite-incubating.api | 17 ++-- ...ormalized-cache-sqlite-incubating.klib.api | 19 ++-- .../normalized/sql/SqlNormalizedCache.kt | 15 ++- .../internal/ApolloJsonElementSerializer.kt | 6 +- .../normalized/sql/internal/RecordDatabase.kt | 24 ++--- .../normalized/sql/internal/fields/fields.sq | 5 +- .../normalized/sql/SqlNormalizedCacheTest.kt | 3 +- .../kotlin/test/DeferNormalizedCacheTest.kt | 5 +- .../kotlin/DanglingReferencesTest.kt | 13 +-- .../commonTest/kotlin/GarbageCollectTest.kt | 25 ++--- .../src/commonTest/kotlin/StaleFieldsTest.kt | 37 ++++---- .../src/commonTest/kotlin/FetchPolicyTest.kt | 3 +- .../src/commonTest/kotlin/NormalizerTest.kt | 36 +++---- .../src/commonTest/kotlin/OtherCacheTest.kt | 4 +- .../FragmentNormalizerTest.kt | 5 +- .../kotlin/CacheMissLoggingInterceptorTest.kt | 4 +- .../kotlin/test/CachePartialResultTest.kt | 33 ++++--- 32 files changed, 277 insertions(+), 238 deletions(-) diff --git a/normalized-cache-incubating/api/normalized-cache-incubating.api b/normalized-cache-incubating/api/normalized-cache-incubating.api index 4a16c978..b2a2866d 100644 --- a/normalized-cache-incubating/api/normalized-cache-incubating.api +++ b/normalized-cache-incubating/api/normalized-cache-incubating.api @@ -5,18 +5,18 @@ public abstract interface class com/apollographql/cache/normalized/ApolloStore { public abstract fun dispose ()V public abstract fun dump ()Ljava/util/Map; public abstract fun getChangedKeys ()Lkotlinx/coroutines/flow/SharedFlow; - public abstract fun normalize (Lcom/apollographql/apollo/api/Executable;Ljava/util/Map;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;)Ljava/util/Map; + public abstract fun normalize-niOPdRo (Lcom/apollographql/apollo/api/Executable;Ljava/util/Map;Lokio/ByteString;Lcom/apollographql/apollo/api/CustomScalarAdapters;)Ljava/util/Map; public abstract fun publish (Ljava/util/Set;Lkotlin/coroutines/Continuation;)Ljava/lang/Object; - public abstract fun readFragment-dEpVOtE (Lcom/apollographql/apollo/api/Fragment;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/ApolloStore$ReadResult; + public abstract fun readFragment-dEpVOtE (Lcom/apollographql/apollo/api/Fragment;Lokio/ByteString;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/ApolloStore$ReadResult; public abstract fun readOperation (Lcom/apollographql/apollo/api/Operation;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/apollo/api/ApolloResponse; public abstract fun remove (Ljava/util/List;Z)I - public 
abstract fun remove-eNSUWrY (Ljava/lang/String;Z)Z + public abstract fun remove-eNSUWrY (Lokio/ByteString;Z)Z public abstract fun rollbackOptimisticUpdates (Ljava/util/UUID;)Ljava/util/Set; - public abstract fun writeFragment-1qdIjGk (Lcom/apollographql/apollo/api/Fragment;Ljava/lang/String;Lcom/apollographql/apollo/api/Fragment$Data;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Set; + public abstract fun writeFragment-1qdIjGk (Lcom/apollographql/apollo/api/Fragment;Lokio/ByteString;Lcom/apollographql/apollo/api/Fragment$Data;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Set; public abstract fun writeOperation (Lcom/apollographql/apollo/api/Operation;Lcom/apollographql/apollo/api/Operation$Data;Ljava/util/List;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Set; public abstract fun writeOperation (Lcom/apollographql/apollo/api/Operation;Ljava/util/Map;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Set; public abstract fun writeOptimisticUpdates (Lcom/apollographql/apollo/api/Operation;Lcom/apollographql/apollo/api/Operation$Data;Ljava/util/UUID;Lcom/apollographql/apollo/api/CustomScalarAdapters;)Ljava/util/Set; - public abstract fun writeOptimisticUpdates-1qdIjGk (Lcom/apollographql/apollo/api/Fragment;Ljava/lang/String;Lcom/apollographql/apollo/api/Fragment$Data;Ljava/util/UUID;Lcom/apollographql/apollo/api/CustomScalarAdapters;)Ljava/util/Set; + public abstract fun writeOptimisticUpdates-1qdIjGk (Lcom/apollographql/apollo/api/Fragment;Lokio/ByteString;Lcom/apollographql/apollo/api/Fragment$Data;Ljava/util/UUID;Lcom/apollographql/apollo/api/CustomScalarAdapters;)Ljava/util/Set; } public final class com/apollographql/cache/normalized/ApolloStore$Companion { @@ -24,16 +24,16 @@ public final class com/apollographql/cache/normalized/ApolloStore$Companion { } public final class com/apollographql/cache/normalized/ApolloStore$DefaultImpls { - public static synthetic fun normalize$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Executable;Ljava/util/Map;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;ILjava/lang/Object;)Ljava/util/Map; - public static synthetic fun readFragment-dEpVOtE$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Fragment;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Lcom/apollographql/cache/normalized/ApolloStore$ReadResult; + public static synthetic fun normalize-niOPdRo$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Executable;Ljava/util/Map;Lokio/ByteString;Lcom/apollographql/apollo/api/CustomScalarAdapters;ILjava/lang/Object;)Ljava/util/Map; + public static synthetic fun readFragment-dEpVOtE$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Fragment;Lokio/ByteString;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Lcom/apollographql/cache/normalized/ApolloStore$ReadResult; public static synthetic fun readOperation$default 
(Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Operation;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Lcom/apollographql/apollo/api/ApolloResponse; public static synthetic fun remove$default (Lcom/apollographql/cache/normalized/ApolloStore;Ljava/util/List;ZILjava/lang/Object;)I - public static synthetic fun remove-eNSUWrY$default (Lcom/apollographql/cache/normalized/ApolloStore;Ljava/lang/String;ZILjava/lang/Object;)Z - public static synthetic fun writeFragment-1qdIjGk$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Fragment;Ljava/lang/String;Lcom/apollographql/apollo/api/Fragment$Data;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Ljava/util/Set; + public static synthetic fun remove-eNSUWrY$default (Lcom/apollographql/cache/normalized/ApolloStore;Lokio/ByteString;ZILjava/lang/Object;)Z + public static synthetic fun writeFragment-1qdIjGk$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Fragment;Lokio/ByteString;Lcom/apollographql/apollo/api/Fragment$Data;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Ljava/util/Set; public static synthetic fun writeOperation$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Operation;Lcom/apollographql/apollo/api/Operation$Data;Ljava/util/List;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Ljava/util/Set; public static synthetic fun writeOperation$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Operation;Ljava/util/Map;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Ljava/util/Set; public static synthetic fun writeOptimisticUpdates$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Operation;Lcom/apollographql/apollo/api/Operation$Data;Ljava/util/UUID;Lcom/apollographql/apollo/api/CustomScalarAdapters;ILjava/lang/Object;)Ljava/util/Set; - public static synthetic fun writeOptimisticUpdates-1qdIjGk$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Fragment;Ljava/lang/String;Lcom/apollographql/apollo/api/Fragment$Data;Ljava/util/UUID;Lcom/apollographql/apollo/api/CustomScalarAdapters;ILjava/lang/Object;)Ljava/util/Set; + public static synthetic fun writeOptimisticUpdates-1qdIjGk$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Fragment;Lokio/ByteString;Lcom/apollographql/apollo/api/Fragment$Data;Ljava/util/UUID;Lcom/apollographql/apollo/api/CustomScalarAdapters;ILjava/lang/Object;)Ljava/util/Set; } public final class com/apollographql/cache/normalized/ApolloStore$ReadResult { @@ -227,34 +227,36 @@ public final class com/apollographql/cache/normalized/api/CacheHeaders$Companion public final class com/apollographql/cache/normalized/api/CacheKey { public static final field Companion Lcom/apollographql/cache/normalized/api/CacheKey$Companion; - public static final synthetic fun box-impl (Ljava/lang/String;)Lcom/apollographql/cache/normalized/api/CacheKey; + public static final field HASH_SIZE_BYTES I + public static final synthetic fun box-impl 
(Lokio/ByteString;)Lcom/apollographql/cache/normalized/api/CacheKey; public static final fun canDeserialize (Ljava/lang/String;)Z - public static fun constructor-impl (Ljava/lang/String;Ljava/util/List;)Ljava/lang/String; - public static fun constructor-impl (Ljava/lang/String;Z)Ljava/lang/String; - public static fun constructor-impl (Ljava/lang/String;[Ljava/lang/String;)Ljava/lang/String; - public static synthetic fun constructor-impl$default (Ljava/lang/String;ZILkotlin/jvm/internal/DefaultConstructorMarker;)Ljava/lang/String; - public static final fun deserialize-gE2UBb4 (Ljava/lang/String;)Ljava/lang/String; + public static fun constructor-impl (Ljava/lang/String;)Lokio/ByteString; + public static fun constructor-impl (Ljava/lang/String;Ljava/util/List;)Lokio/ByteString; + public static fun constructor-impl (Ljava/lang/String;[Ljava/lang/String;)Lokio/ByteString; + public static fun constructor-impl (Lokio/ByteString;)Lokio/ByteString; + public static final fun deserialize-gE2UBb4 (Ljava/lang/String;)Lokio/ByteString; public fun equals (Ljava/lang/Object;)Z - public static fun equals-impl (Ljava/lang/String;Ljava/lang/Object;)Z - public static final fun equals-impl0 (Ljava/lang/String;Ljava/lang/String;)Z - public final fun getKey ()Ljava/lang/String; + public static fun equals-impl (Lokio/ByteString;Ljava/lang/Object;)Z + public static final fun equals-impl0 (Lokio/ByteString;Lokio/ByteString;)Z + public final fun getKey ()Lokio/ByteString; public fun hashCode ()I - public static fun hashCode-impl (Ljava/lang/String;)I - public static final fun rootKey-mqw0cJ0 ()Ljava/lang/String; - public static final fun serialize-impl (Ljava/lang/String;)Ljava/lang/String; + public static fun hashCode-impl (Lokio/ByteString;)I + public static final fun keyToString-impl (Lokio/ByteString;)Ljava/lang/String; + public static final fun rootKey-mqw0cJ0 ()Lokio/ByteString; + public static final fun serialize-impl (Lokio/ByteString;)Ljava/lang/String; public fun toString ()Ljava/lang/String; - public static fun toString-impl (Ljava/lang/String;)Ljava/lang/String; - public final synthetic fun unbox-impl ()Ljava/lang/String; + public static fun toString-impl (Lokio/ByteString;)Ljava/lang/String; + public final synthetic fun unbox-impl ()Lokio/ByteString; } public final class com/apollographql/cache/normalized/api/CacheKey$Companion { public final fun canDeserialize (Ljava/lang/String;)Z - public final fun deserialize-gE2UBb4 (Ljava/lang/String;)Ljava/lang/String; - public final fun rootKey-mqw0cJ0 ()Ljava/lang/String; + public final fun deserialize-gE2UBb4 (Ljava/lang/String;)Lokio/ByteString; + public final fun rootKey-mqw0cJ0 ()Lokio/ByteString; } public abstract interface class com/apollographql/cache/normalized/api/CacheKeyGenerator { - public abstract fun cacheKeyForObject-z2_y8R0 (Ljava/util/Map;Lcom/apollographql/cache/normalized/api/CacheKeyGeneratorContext;)Ljava/lang/String; + public abstract fun cacheKeyForObject-z2_y8R0 (Ljava/util/Map;Lcom/apollographql/cache/normalized/api/CacheKeyGeneratorContext;)Lokio/ByteString; } public final class com/apollographql/cache/normalized/api/CacheKeyGeneratorContext { @@ -264,12 +266,14 @@ public final class com/apollographql/cache/normalized/api/CacheKeyGeneratorConte } public final class com/apollographql/cache/normalized/api/CacheKeyKt { - public static final fun fieldKey-eNSUWrY (Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String; + public static final fun append-eNSUWrY (Lokio/ByteString;[Ljava/lang/String;)Lokio/ByteString; + public static final fun 
fieldKey-eNSUWrY (Lokio/ByteString;Ljava/lang/String;)Ljava/lang/String; + public static final fun isRootKey-pWl1Des (Lokio/ByteString;)Z } public abstract class com/apollographql/cache/normalized/api/CacheKeyResolver : com/apollographql/cache/normalized/api/CacheResolver { public fun ()V - public abstract fun cacheKeyForField-fLoEQYY (Lcom/apollographql/cache/normalized/api/ResolverContext;)Ljava/lang/String; + public abstract fun cacheKeyForField-fLoEQYY (Lcom/apollographql/cache/normalized/api/ResolverContext;)Lokio/ByteString; public fun listOfCacheKeysForField (Lcom/apollographql/cache/normalized/api/ResolverContext;)Ljava/util/List; public final fun resolveField (Lcom/apollographql/cache/normalized/api/ResolverContext;)Ljava/lang/Object; } @@ -397,14 +401,14 @@ public final class com/apollographql/cache/normalized/api/IdCacheKeyGenerator : public fun ()V public fun ([Ljava/lang/String;)V public synthetic fun ([Ljava/lang/String;ILkotlin/jvm/internal/DefaultConstructorMarker;)V - public fun cacheKeyForObject-z2_y8R0 (Ljava/util/Map;Lcom/apollographql/cache/normalized/api/CacheKeyGeneratorContext;)Ljava/lang/String; + public fun cacheKeyForObject-z2_y8R0 (Ljava/util/Map;Lcom/apollographql/cache/normalized/api/CacheKeyGeneratorContext;)Lokio/ByteString; } public final class com/apollographql/cache/normalized/api/IdCacheKeyResolver : com/apollographql/cache/normalized/api/CacheKeyResolver { public fun ()V public fun (Ljava/util/List;Ljava/util/List;)V public synthetic fun (Ljava/util/List;Ljava/util/List;ILkotlin/jvm/internal/DefaultConstructorMarker;)V - public fun cacheKeyForField-fLoEQYY (Lcom/apollographql/cache/normalized/api/ResolverContext;)Ljava/lang/String; + public fun cacheKeyForField-fLoEQYY (Lcom/apollographql/cache/normalized/api/ResolverContext;)Lokio/ByteString; public fun listOfCacheKeysForField (Lcom/apollographql/cache/normalized/api/ResolverContext;)Ljava/util/List; } @@ -458,7 +462,7 @@ public abstract interface class com/apollographql/cache/normalized/api/Normalize public abstract fun merge (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; public static fun prettifyDump (Ljava/util/Map;)Ljava/lang/String; public abstract fun remove (Ljava/util/Collection;Z)I - public abstract fun remove-eNSUWrY (Ljava/lang/String;Z)Z + public abstract fun remove-eNSUWrY (Lokio/ByteString;Z)Z public abstract fun trim (JF)J } @@ -478,14 +482,14 @@ public abstract class com/apollographql/cache/normalized/api/NormalizedCacheFact public abstract interface class com/apollographql/cache/normalized/api/ReadOnlyNormalizedCache { public abstract fun dump ()Ljava/util/Map; - public abstract fun loadRecord-eNSUWrY (Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; + public abstract fun loadRecord-eNSUWrY (Lokio/ByteString;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; public abstract fun loadRecords (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Collection; } public final class com/apollographql/cache/normalized/api/Record : java/util/Map, kotlin/jvm/internal/markers/KMappedMarker { public static final field Companion Lcom/apollographql/cache/normalized/api/Record$Companion; - public synthetic fun (Ljava/lang/String;Ljava/util/Map;Ljava/util/UUID;Ljava/util/Map;ILkotlin/jvm/internal/DefaultConstructorMarker;)V - public synthetic fun 
(Ljava/lang/String;Ljava/util/Map;Ljava/util/UUID;Ljava/util/Map;Lkotlin/jvm/internal/DefaultConstructorMarker;)V + public synthetic fun (Lokio/ByteString;Ljava/util/Map;Ljava/util/UUID;Ljava/util/Map;ILkotlin/jvm/internal/DefaultConstructorMarker;)V + public synthetic fun (Lokio/ByteString;Ljava/util/Map;Ljava/util/UUID;Ljava/util/Map;Lkotlin/jvm/internal/DefaultConstructorMarker;)V public fun clear ()V public synthetic fun compute (Ljava/lang/Object;Ljava/util/function/BiFunction;)Ljava/lang/Object; public fun compute (Ljava/lang/String;Ljava/util/function/BiFunction;)Ljava/lang/Object; @@ -502,7 +506,7 @@ public final class com/apollographql/cache/normalized/api/Record : java/util/Map public fun get (Ljava/lang/String;)Ljava/lang/Object; public fun getEntries ()Ljava/util/Set; public final fun getFields ()Ljava/util/Map; - public final fun getKey-mqw0cJ0 ()Ljava/lang/String; + public final fun getKey-mqw0cJ0 ()Lokio/ByteString; public fun getKeys ()Ljava/util/Set; public final fun getMetadata ()Ljava/util/Map; public final fun getMutationId ()Ljava/util/UUID; @@ -557,12 +561,12 @@ public final class com/apollographql/cache/normalized/api/RecordMergerKt { } public final class com/apollographql/cache/normalized/api/ResolverContext { - public synthetic fun (Lcom/apollographql/apollo/api/CompiledField;Lcom/apollographql/apollo/api/Executable$Variables;Ljava/util/Map;Ljava/lang/String;Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Ljava/util/List;Lkotlin/jvm/internal/DefaultConstructorMarker;)V + public synthetic fun (Lcom/apollographql/apollo/api/CompiledField;Lcom/apollographql/apollo/api/Executable$Variables;Ljava/util/Map;Lokio/ByteString;Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Ljava/util/List;Lkotlin/jvm/internal/DefaultConstructorMarker;)V public final fun getCacheHeaders ()Lcom/apollographql/cache/normalized/api/CacheHeaders; public final fun getField ()Lcom/apollographql/apollo/api/CompiledField; public final fun getFieldKeyGenerator ()Lcom/apollographql/cache/normalized/api/FieldKeyGenerator; public final fun getParent ()Ljava/util/Map; - public final fun getParentKey-mqw0cJ0 ()Ljava/lang/String; + public final fun getParentKey-mqw0cJ0 ()Lokio/ByteString; public final fun getParentType ()Ljava/lang/String; public final fun getPath ()Ljava/util/List; public final fun getVariables ()Lcom/apollographql/apollo/api/Executable$Variables; @@ -575,15 +579,14 @@ public final class com/apollographql/cache/normalized/api/SchemaCoordinatesMaxAg public final class com/apollographql/cache/normalized/api/TypePolicyCacheKeyGenerator : com/apollographql/cache/normalized/api/CacheKeyGenerator { public static final field INSTANCE Lcom/apollographql/cache/normalized/api/TypePolicyCacheKeyGenerator; - public fun cacheKeyForObject-z2_y8R0 (Ljava/util/Map;Lcom/apollographql/cache/normalized/api/CacheKeyGeneratorContext;)Ljava/lang/String; + public fun cacheKeyForObject-z2_y8R0 (Ljava/util/Map;Lcom/apollographql/cache/normalized/api/CacheKeyGeneratorContext;)Lokio/ByteString; } public final class com/apollographql/cache/normalized/internal/NormalizerKt { - public static final fun hashed (Ljava/lang/String;)Ljava/lang/String; - public static final fun normalized 
(Lcom/apollographql/apollo/api/Executable$Data;Lcom/apollographql/apollo/api/Executable;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheKeyGenerator;Lcom/apollographql/cache/normalized/api/MetadataGenerator;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Lcom/apollographql/cache/normalized/api/EmbeddedFieldsProvider;)Ljava/util/Map; - public static final fun normalized (Ljava/util/Map;Lcom/apollographql/apollo/api/Executable;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheKeyGenerator;Lcom/apollographql/cache/normalized/api/MetadataGenerator;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Lcom/apollographql/cache/normalized/api/EmbeddedFieldsProvider;)Ljava/util/Map; - public static synthetic fun normalized$default (Lcom/apollographql/apollo/api/Executable$Data;Lcom/apollographql/apollo/api/Executable;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheKeyGenerator;Lcom/apollographql/cache/normalized/api/MetadataGenerator;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Lcom/apollographql/cache/normalized/api/EmbeddedFieldsProvider;ILjava/lang/Object;)Ljava/util/Map; - public static synthetic fun normalized$default (Ljava/util/Map;Lcom/apollographql/apollo/api/Executable;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheKeyGenerator;Lcom/apollographql/cache/normalized/api/MetadataGenerator;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Lcom/apollographql/cache/normalized/api/EmbeddedFieldsProvider;ILjava/lang/Object;)Ljava/util/Map; + public static final fun normalized-MplSeLY (Lcom/apollographql/apollo/api/Executable$Data;Lcom/apollographql/apollo/api/Executable;Lokio/ByteString;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheKeyGenerator;Lcom/apollographql/cache/normalized/api/MetadataGenerator;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Lcom/apollographql/cache/normalized/api/EmbeddedFieldsProvider;)Ljava/util/Map; + public static final fun normalized-MplSeLY (Ljava/util/Map;Lcom/apollographql/apollo/api/Executable;Lokio/ByteString;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheKeyGenerator;Lcom/apollographql/cache/normalized/api/MetadataGenerator;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Lcom/apollographql/cache/normalized/api/EmbeddedFieldsProvider;)Ljava/util/Map; + public static synthetic fun normalized-MplSeLY$default (Lcom/apollographql/apollo/api/Executable$Data;Lcom/apollographql/apollo/api/Executable;Lokio/ByteString;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheKeyGenerator;Lcom/apollographql/cache/normalized/api/MetadataGenerator;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Lcom/apollographql/cache/normalized/api/EmbeddedFieldsProvider;ILjava/lang/Object;)Ljava/util/Map; + public static synthetic fun normalized-MplSeLY$default 
(Ljava/util/Map;Lcom/apollographql/apollo/api/Executable;Lokio/ByteString;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheKeyGenerator;Lcom/apollographql/cache/normalized/api/MetadataGenerator;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Lcom/apollographql/cache/normalized/api/EmbeddedFieldsProvider;ILjava/lang/Object;)Ljava/util/Map; } public final class com/apollographql/cache/normalized/memory/MemoryCache : com/apollographql/cache/normalized/api/NormalizedCache { @@ -593,12 +596,12 @@ public final class com/apollographql/cache/normalized/memory/MemoryCache : com/a public fun clearAll ()V public fun dump ()Ljava/util/Map; public final fun getSize ()I - public fun loadRecord-eNSUWrY (Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; + public fun loadRecord-eNSUWrY (Lokio/ByteString;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; public fun loadRecords (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Collection; public fun merge (Lcom/apollographql/cache/normalized/api/Record;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; public fun merge (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; public fun remove (Ljava/util/Collection;Z)I - public fun remove-eNSUWrY (Ljava/lang/String;Z)Z + public fun remove-eNSUWrY (Lokio/ByteString;Z)Z public fun trim (JF)J } diff --git a/normalized-cache-incubating/api/normalized-cache-incubating.klib.api b/normalized-cache-incubating/api/normalized-cache-incubating.klib.api index bff69e32..2265b2a8 100644 --- a/normalized-cache-incubating/api/normalized-cache-incubating.klib.api +++ b/normalized-cache-incubating/api/normalized-cache-incubating.klib.api @@ -80,7 +80,7 @@ abstract interface com.apollographql.cache.normalized/ApolloStore { // com.apoll abstract val changedKeys // com.apollographql.cache.normalized/ApolloStore.changedKeys|{}changedKeys[0] abstract fun (): kotlinx.coroutines.flow/SharedFlow> // com.apollographql.cache.normalized/ApolloStore.changedKeys.|(){}[0] - abstract fun <#A1: com.apollographql.apollo.api/Executable.Data> normalize(com.apollographql.apollo.api/Executable<#A1>, kotlin.collections/Map, kotlin/String = ..., com.apollographql.apollo.api/CustomScalarAdapters = ...): kotlin.collections/Map // com.apollographql.cache.normalized/ApolloStore.normalize|normalize(com.apollographql.apollo.api.Executable<0:0>;kotlin.collections.Map;kotlin.String;com.apollographql.apollo.api.CustomScalarAdapters){0§}[0] + abstract fun <#A1: com.apollographql.apollo.api/Executable.Data> normalize(com.apollographql.apollo.api/Executable<#A1>, kotlin.collections/Map, com.apollographql.cache.normalized.api/CacheKey = ..., com.apollographql.apollo.api/CustomScalarAdapters = ...): kotlin.collections/Map // com.apollographql.cache.normalized/ApolloStore.normalize|normalize(com.apollographql.apollo.api.Executable<0:0>;kotlin.collections.Map;com.apollographql.cache.normalized.api.CacheKey;com.apollographql.apollo.api.CustomScalarAdapters){0§}[0] abstract fun <#A1: com.apollographql.apollo.api/Fragment.Data> readFragment(com.apollographql.apollo.api/Fragment<#A1>, com.apollographql.cache.normalized.api/CacheKey, com.apollographql.apollo.api/CustomScalarAdapters = ..., 
com.apollographql.cache.normalized.api/CacheHeaders = ...): com.apollographql.cache.normalized/ApolloStore.ReadResult<#A1> // com.apollographql.cache.normalized/ApolloStore.readFragment|readFragment(com.apollographql.apollo.api.Fragment<0:0>;com.apollographql.cache.normalized.api.CacheKey;com.apollographql.apollo.api.CustomScalarAdapters;com.apollographql.cache.normalized.api.CacheHeaders){0§}[0] abstract fun <#A1: com.apollographql.apollo.api/Fragment.Data> writeFragment(com.apollographql.apollo.api/Fragment<#A1>, com.apollographql.cache.normalized.api/CacheKey, #A1, com.apollographql.apollo.api/CustomScalarAdapters = ..., com.apollographql.cache.normalized.api/CacheHeaders = ...): kotlin.collections/Set // com.apollographql.cache.normalized/ApolloStore.writeFragment|writeFragment(com.apollographql.apollo.api.Fragment<0:0>;com.apollographql.cache.normalized.api.CacheKey;0:0;com.apollographql.apollo.api.CustomScalarAdapters;com.apollographql.cache.normalized.api.CacheHeaders){0§}[0] abstract fun <#A1: com.apollographql.apollo.api/Fragment.Data> writeOptimisticUpdates(com.apollographql.apollo.api/Fragment<#A1>, com.apollographql.cache.normalized.api/CacheKey, #A1, com.benasher44.uuid/Uuid, com.apollographql.apollo.api/CustomScalarAdapters = ...): kotlin.collections/Set // com.apollographql.cache.normalized/ApolloStore.writeOptimisticUpdates|writeOptimisticUpdates(com.apollographql.apollo.api.Fragment<0:0>;com.apollographql.cache.normalized.api.CacheKey;0:0;com.benasher44.uuid.Uuid;com.apollographql.apollo.api.CustomScalarAdapters){0§}[0] @@ -461,19 +461,24 @@ final class com.apollographql.cache.normalized/RemovedFieldsAndRecords { // com. } final value class com.apollographql.cache.normalized.api/CacheKey { // com.apollographql.cache.normalized.api/CacheKey|null[0] + constructor (kotlin/String) // com.apollographql.cache.normalized.api/CacheKey.|(kotlin.String){}[0] constructor (kotlin/String, kotlin.collections/List) // com.apollographql.cache.normalized.api/CacheKey.|(kotlin.String;kotlin.collections.List){}[0] constructor (kotlin/String, kotlin/Array...) // com.apollographql.cache.normalized.api/CacheKey.|(kotlin.String;kotlin.Array...){}[0] - constructor (kotlin/String, kotlin/Boolean = ...) 
// com.apollographql.cache.normalized.api/CacheKey.|(kotlin.String;kotlin.Boolean){}[0] + constructor (okio/ByteString) // com.apollographql.cache.normalized.api/CacheKey.|(okio.ByteString){}[0] final val key // com.apollographql.cache.normalized.api/CacheKey.key|{}key[0] - final fun (): kotlin/String // com.apollographql.cache.normalized.api/CacheKey.key.|(){}[0] + final fun (): okio/ByteString // com.apollographql.cache.normalized.api/CacheKey.key.|(){}[0] final fun equals(kotlin/Any?): kotlin/Boolean // com.apollographql.cache.normalized.api/CacheKey.equals|equals(kotlin.Any?){}[0] final fun hashCode(): kotlin/Int // com.apollographql.cache.normalized.api/CacheKey.hashCode|hashCode(){}[0] + final fun keyToString(): kotlin/String // com.apollographql.cache.normalized.api/CacheKey.keyToString|keyToString(){}[0] final fun serialize(): kotlin/String // com.apollographql.cache.normalized.api/CacheKey.serialize|serialize(){}[0] final fun toString(): kotlin/String // com.apollographql.cache.normalized.api/CacheKey.toString|toString(){}[0] final object Companion { // com.apollographql.cache.normalized.api/CacheKey.Companion|null[0] + final const val HASH_SIZE_BYTES // com.apollographql.cache.normalized.api/CacheKey.Companion.HASH_SIZE_BYTES|{}HASH_SIZE_BYTES[0] + final fun (): kotlin/Int // com.apollographql.cache.normalized.api/CacheKey.Companion.HASH_SIZE_BYTES.|(){}[0] + final fun canDeserialize(kotlin/String): kotlin/Boolean // com.apollographql.cache.normalized.api/CacheKey.Companion.canDeserialize|canDeserialize(kotlin.String){}[0] final fun deserialize(kotlin/String): com.apollographql.cache.normalized.api/CacheKey // com.apollographql.cache.normalized.api/CacheKey.Companion.deserialize|deserialize(kotlin.String){}[0] final fun rootKey(): com.apollographql.cache.normalized.api/CacheKey // com.apollographql.cache.normalized.api/CacheKey.Companion.rootKey|rootKey(){}[0] @@ -558,7 +563,9 @@ final val com.apollographql.cache.normalized/isFromCache // com.apollographql.ca final fun (com.apollographql.apollo/ApolloClient.Builder).com.apollographql.cache.normalized/logCacheMisses(kotlin/Function1 = ...): com.apollographql.apollo/ApolloClient.Builder // com.apollographql.cache.normalized/logCacheMisses|logCacheMisses@com.apollographql.apollo.ApolloClient.Builder(kotlin.Function1){}[0] final fun (com.apollographql.apollo/ApolloClient.Builder).com.apollographql.cache.normalized/normalizedCache(com.apollographql.cache.normalized.api/NormalizedCacheFactory, com.apollographql.cache.normalized.api/CacheKeyGenerator = ..., com.apollographql.cache.normalized.api/MetadataGenerator = ..., com.apollographql.cache.normalized.api/CacheResolver = ..., com.apollographql.cache.normalized.api/RecordMerger = ..., com.apollographql.cache.normalized.api/FieldKeyGenerator = ..., com.apollographql.cache.normalized.api/EmbeddedFieldsProvider = ..., kotlin/Boolean = ...): com.apollographql.apollo/ApolloClient.Builder // com.apollographql.cache.normalized/normalizedCache|normalizedCache@com.apollographql.apollo.ApolloClient.Builder(com.apollographql.cache.normalized.api.NormalizedCacheFactory;com.apollographql.cache.normalized.api.CacheKeyGenerator;com.apollographql.cache.normalized.api.MetadataGenerator;com.apollographql.cache.normalized.api.CacheResolver;com.apollographql.cache.normalized.api.RecordMerger;com.apollographql.cache.normalized.api.FieldKeyGenerator;com.apollographql.cache.normalized.api.EmbeddedFieldsProvider;kotlin.Boolean){}[0] final fun 
(com.apollographql.apollo/ApolloClient.Builder).com.apollographql.cache.normalized/store(com.apollographql.cache.normalized/ApolloStore, kotlin/Boolean = ...): com.apollographql.apollo/ApolloClient.Builder // com.apollographql.cache.normalized/store|store@com.apollographql.apollo.ApolloClient.Builder(com.apollographql.cache.normalized.ApolloStore;kotlin.Boolean){}[0] +final fun (com.apollographql.cache.normalized.api/CacheKey).com.apollographql.cache.normalized.api/append(kotlin/Array...): com.apollographql.cache.normalized.api/CacheKey // com.apollographql.cache.normalized.api/append|append@com.apollographql.cache.normalized.api.CacheKey(kotlin.Array...){}[0] final fun (com.apollographql.cache.normalized.api/CacheKey).com.apollographql.cache.normalized.api/fieldKey(kotlin/String): kotlin/String // com.apollographql.cache.normalized.api/fieldKey|fieldKey@com.apollographql.cache.normalized.api.CacheKey(kotlin.String){}[0] +final fun (com.apollographql.cache.normalized.api/CacheKey).com.apollographql.cache.normalized.api/isRootKey(): kotlin/Boolean // com.apollographql.cache.normalized.api/isRootKey|isRootKey@com.apollographql.cache.normalized.api.CacheKey(){}[0] final fun (com.apollographql.cache.normalized.api/NormalizedCache).com.apollographql.cache.normalized/allRecords(): kotlin.collections/Map // com.apollographql.cache.normalized/allRecords|allRecords@com.apollographql.cache.normalized.api.NormalizedCache(){}[0] final fun (com.apollographql.cache.normalized.api/NormalizedCache).com.apollographql.cache.normalized/garbageCollect(com.apollographql.cache.normalized.api/MaxAgeProvider, kotlin.time/Duration = ...): com.apollographql.cache.normalized/GarbageCollectResult // com.apollographql.cache.normalized/garbageCollect|garbageCollect@com.apollographql.cache.normalized.api.NormalizedCache(com.apollographql.cache.normalized.api.MaxAgeProvider;kotlin.time.Duration){}[0] final fun (com.apollographql.cache.normalized.api/NormalizedCache).com.apollographql.cache.normalized/removeDanglingReferences(): com.apollographql.cache.normalized/RemovedFieldsAndRecords // com.apollographql.cache.normalized/removeDanglingReferences|removeDanglingReferences@com.apollographql.cache.normalized.api.NormalizedCache(){}[0] @@ -574,10 +581,9 @@ final fun (com.apollographql.cache.normalized/ApolloStore).com.apollographql.cac final fun (com.apollographql.cache.normalized/ApolloStore).com.apollographql.cache.normalized/removeUnreachableRecords(): kotlin.collections/Set // com.apollographql.cache.normalized/removeUnreachableRecords|removeUnreachableRecords@com.apollographql.cache.normalized.ApolloStore(){}[0] final fun (kotlin.collections/Collection?).com.apollographql.cache.normalized.api/dependentKeys(): kotlin.collections/Set // com.apollographql.cache.normalized.api/dependentKeys|dependentKeys@kotlin.collections.Collection?(){}[0] final fun (kotlin.collections/Map).com.apollographql.cache.normalized/getReachableCacheKeys(): kotlin.collections/Set // com.apollographql.cache.normalized/getReachableCacheKeys|getReachableCacheKeys@kotlin.collections.Map(){}[0] -final fun (kotlin/String).com.apollographql.cache.normalized.internal/hashed(): kotlin/String // com.apollographql.cache.normalized.internal/hashed|hashed@kotlin.String(){}[0] final fun <#A: com.apollographql.apollo.api/Executable.Data> (#A).com.apollographql.cache.normalized.api/withErrors(com.apollographql.apollo.api/Executable<#A>, kotlin.collections/List?, com.apollographql.apollo.api/CustomScalarAdapters = ...): kotlin.collections/Map // 
com.apollographql.cache.normalized.api/withErrors|withErrors@0:0(com.apollographql.apollo.api.Executable<0:0>;kotlin.collections.List?;com.apollographql.apollo.api.CustomScalarAdapters){0§}[0] -final fun <#A: com.apollographql.apollo.api/Executable.Data> (#A).com.apollographql.cache.normalized.internal/normalized(com.apollographql.apollo.api/Executable<#A>, kotlin/String = ..., com.apollographql.apollo.api/CustomScalarAdapters = ..., com.apollographql.cache.normalized.api/CacheKeyGenerator = ..., com.apollographql.cache.normalized.api/MetadataGenerator = ..., com.apollographql.cache.normalized.api/FieldKeyGenerator = ..., com.apollographql.cache.normalized.api/EmbeddedFieldsProvider = ...): kotlin.collections/Map // com.apollographql.cache.normalized.internal/normalized|normalized@0:0(com.apollographql.apollo.api.Executable<0:0>;kotlin.String;com.apollographql.apollo.api.CustomScalarAdapters;com.apollographql.cache.normalized.api.CacheKeyGenerator;com.apollographql.cache.normalized.api.MetadataGenerator;com.apollographql.cache.normalized.api.FieldKeyGenerator;com.apollographql.cache.normalized.api.EmbeddedFieldsProvider){0§}[0] -final fun <#A: com.apollographql.apollo.api/Executable.Data> (kotlin.collections/Map).com.apollographql.cache.normalized.internal/normalized(com.apollographql.apollo.api/Executable<#A>, kotlin/String = ..., com.apollographql.apollo.api/CustomScalarAdapters = ..., com.apollographql.cache.normalized.api/CacheKeyGenerator = ..., com.apollographql.cache.normalized.api/MetadataGenerator = ..., com.apollographql.cache.normalized.api/FieldKeyGenerator = ..., com.apollographql.cache.normalized.api/EmbeddedFieldsProvider = ...): kotlin.collections/Map // com.apollographql.cache.normalized.internal/normalized|normalized@kotlin.collections.Map(com.apollographql.apollo.api.Executable<0:0>;kotlin.String;com.apollographql.apollo.api.CustomScalarAdapters;com.apollographql.cache.normalized.api.CacheKeyGenerator;com.apollographql.cache.normalized.api.MetadataGenerator;com.apollographql.cache.normalized.api.FieldKeyGenerator;com.apollographql.cache.normalized.api.EmbeddedFieldsProvider){0§}[0] +final fun <#A: com.apollographql.apollo.api/Executable.Data> (#A).com.apollographql.cache.normalized.internal/normalized(com.apollographql.apollo.api/Executable<#A>, com.apollographql.cache.normalized.api/CacheKey = ..., com.apollographql.apollo.api/CustomScalarAdapters = ..., com.apollographql.cache.normalized.api/CacheKeyGenerator = ..., com.apollographql.cache.normalized.api/MetadataGenerator = ..., com.apollographql.cache.normalized.api/FieldKeyGenerator = ..., com.apollographql.cache.normalized.api/EmbeddedFieldsProvider = ...): kotlin.collections/Map // com.apollographql.cache.normalized.internal/normalized|normalized@0:0(com.apollographql.apollo.api.Executable<0:0>;com.apollographql.cache.normalized.api.CacheKey;com.apollographql.apollo.api.CustomScalarAdapters;com.apollographql.cache.normalized.api.CacheKeyGenerator;com.apollographql.cache.normalized.api.MetadataGenerator;com.apollographql.cache.normalized.api.FieldKeyGenerator;com.apollographql.cache.normalized.api.EmbeddedFieldsProvider){0§}[0] +final fun <#A: com.apollographql.apollo.api/Executable.Data> (kotlin.collections/Map).com.apollographql.cache.normalized.internal/normalized(com.apollographql.apollo.api/Executable<#A>, com.apollographql.cache.normalized.api/CacheKey = ..., com.apollographql.apollo.api/CustomScalarAdapters = ..., com.apollographql.cache.normalized.api/CacheKeyGenerator = ..., 
com.apollographql.cache.normalized.api/MetadataGenerator = ..., com.apollographql.cache.normalized.api/FieldKeyGenerator = ..., com.apollographql.cache.normalized.api/EmbeddedFieldsProvider = ...): kotlin.collections/Map // com.apollographql.cache.normalized.internal/normalized|normalized@kotlin.collections.Map(com.apollographql.apollo.api.Executable<0:0>;com.apollographql.cache.normalized.api.CacheKey;com.apollographql.apollo.api.CustomScalarAdapters;com.apollographql.cache.normalized.api.CacheKeyGenerator;com.apollographql.cache.normalized.api.MetadataGenerator;com.apollographql.cache.normalized.api.FieldKeyGenerator;com.apollographql.cache.normalized.api.EmbeddedFieldsProvider){0§}[0] final fun <#A: com.apollographql.apollo.api/Mutation.Data> (com.apollographql.apollo.api/ApolloRequest.Builder<#A>).com.apollographql.cache.normalized/optimisticUpdates(#A): com.apollographql.apollo.api/ApolloRequest.Builder<#A> // com.apollographql.cache.normalized/optimisticUpdates|optimisticUpdates@com.apollographql.apollo.api.ApolloRequest.Builder<0:0>(0:0){0§}[0] final fun <#A: com.apollographql.apollo.api/Mutation.Data> (com.apollographql.apollo/ApolloCall<#A>).com.apollographql.cache.normalized/optimisticUpdates(#A): com.apollographql.apollo/ApolloCall<#A> // com.apollographql.cache.normalized/optimisticUpdates|optimisticUpdates@com.apollographql.apollo.ApolloCall<0:0>(0:0){0§}[0] final fun <#A: com.apollographql.apollo.api/Operation.Data> (com.apollographql.apollo.api/ApolloRequest.Builder<#A>).com.apollographql.cache.normalized/fetchFromCache(kotlin/Boolean): com.apollographql.apollo.api/ApolloRequest.Builder<#A> // com.apollographql.cache.normalized/fetchFromCache|fetchFromCache@com.apollographql.apollo.api.ApolloRequest.Builder<0:0>(kotlin.Boolean){0§}[0] diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/ApolloStore.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/ApolloStore.kt index d1437da5..0727e342 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/ApolloStore.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/ApolloStore.kt @@ -244,7 +244,7 @@ interface ApolloStore { fun normalize( executable: Executable, dataWithErrors: DataWithErrors, - rootKey: String = CacheKey.rootKey().key, + rootKey: CacheKey = CacheKey.rootKey(), customScalarAdapters: CustomScalarAdapters = CustomScalarAdapters.Empty, ): Map @@ -313,7 +313,7 @@ internal fun ApolloStore.cacheDumpProvider(): () -> Map cacheClass.normalizedCacheName() to cacheRecords - .mapKeys { (key, _) -> key.key } + .mapKeys { (key, _) -> key.keyToString() } .mapValues { (_, record) -> record.size to record.fields.mapValues { (_, value) -> value.toExternal() diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt index 75f57949..91444b76 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt @@ -1,7 +1,10 @@ package com.apollographql.cache.normalized.api import com.apollographql.apollo.annotations.ApolloInternal -import com.apollographql.cache.normalized.internal.hashed +import com.apollographql.cache.normalized.api.CacheKey.Companion.HASH_SIZE_BYTES 
+import okio.Buffer +import okio.ByteString +import okio.ByteString.Companion.encodeUtf8 import kotlin.jvm.JvmInline import kotlin.jvm.JvmStatic @@ -9,25 +12,18 @@ import kotlin.jvm.JvmStatic * A [CacheKey] identifies an object in the cache. */ @JvmInline -value class CacheKey private constructor( +value class CacheKey( /** * The hashed key of the object in the cache. */ - val key: String, + val key: ByteString, ) { /** * Builds a [CacheKey] from a key. * * @param key The key of the object in the cache. The key must be globally unique. - * @param isHashed If true, the key is already hashed. If false, the key will be hashed. */ - constructor(key: String, isHashed: Boolean = false) : this( - if (isHashed || key == rootKey().key) { - key - } else { - key.hashed() - } - ) + constructor(key: String) : this(key.hashed()) /** * Builds a [CacheKey] from a typename and a list of Strings. @@ -41,8 +37,7 @@ value class CacheKey private constructor( values.forEach { append(it) } - }, - isHashed = false, + } ) /** @@ -52,10 +47,14 @@ value class CacheKey private constructor( */ constructor(typename: String, vararg values: String) : this(typename, values.toList()) - override fun toString() = "CacheKey($key)" + fun keyToString(): String { + return key.hex() + } + + override fun toString() = "CacheKey(${keyToString()})" fun serialize(): String { - return "$SERIALIZATION_TEMPLATE{$key}" + return "$SERIALIZATION_TEMPLATE{${keyToString()}}" } companion object { @@ -85,10 +84,34 @@ value class CacheKey private constructor( fun rootKey(): CacheKey { return ROOT_CACHE_KEY } + + @ApolloInternal + const val HASH_SIZE_BYTES = 10 } } +fun CacheKey.isRootKey(): Boolean { + return this == CacheKey.rootKey() +} + @ApolloInternal fun CacheKey.fieldKey(fieldName: String): String { - return "$key.$fieldName" + return "${keyToString()}.$fieldName" +} + +private fun String.hashed(): ByteString { + return encodeUtf8().hashed() +} + +private fun ByteString.hashed(): ByteString { + return sha256().substring(endIndex = HASH_SIZE_BYTES) +} + +@ApolloInternal +fun CacheKey.append(vararg keys: String): CacheKey { + var cacheKey: CacheKey = this + for (key in keys) { + cacheKey = CacheKey(Buffer().write(cacheKey.key).write(key.encodeUtf8()).readByteString().hashed()) + } + return cacheKey } diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheResolver.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheResolver.kt index 57cd921b..ccef9cc0 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheResolver.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheResolver.kt @@ -135,7 +135,7 @@ object DefaultCacheResolver : CacheResolver { override fun resolveField(context: ResolverContext): Any? 
{ val fieldKey = context.getFieldKey() if (!context.parent.containsKey(fieldKey)) { - throw CacheMissException(context.parentKey.key, fieldKey) + throw CacheMissException(context.parentKey.keyToString(), fieldKey) } return context.parent[fieldKey] @@ -190,7 +190,7 @@ class CacheControlCacheResolver( val maxStale = context.cacheHeaders.headerValue(ApolloCacheHeaders.MAX_STALE)?.toLongOrNull() ?: 0L if (staleDuration >= maxStale) { throw CacheMissException( - key = context.parentKey.key, + key = context.parentKey.keyToString(), fieldName = context.getFieldKey(), stale = true ) @@ -206,7 +206,7 @@ class CacheControlCacheResolver( val maxStale = context.cacheHeaders.headerValue(ApolloCacheHeaders.MAX_STALE)?.toLongOrNull() ?: 0L if (staleDuration >= maxStale) { throw CacheMissException( - key = context.parentKey.key, + key = context.parentKey.keyToString(), fieldName = context.getFieldKey(), stale = true ) diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/NormalizedCache.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/NormalizedCache.kt index 44d954a6..f99823c6 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/NormalizedCache.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/NormalizedCache.kt @@ -129,7 +129,7 @@ interface NormalizedCache : ReadOnlyNormalizedCache { indent(level + 1) append(when (key) { is KClass<*> -> key.simpleName - is CacheKey -> key.key + is CacheKey -> key.keyToString() else -> key } ) diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/CacheBatchReader.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/CacheBatchReader.kt index 323102da..1fa57607 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/CacheBatchReader.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/CacheBatchReader.kt @@ -117,10 +117,10 @@ internal class CacheBatchReader( } else { if (returnPartialResponses) { data[pendingReference.path] = - cacheMissError(CacheMissException(key = pendingReference.key.key, fieldName = null, stale = false), path = pendingReference.path) + cacheMissError(CacheMissException(key = pendingReference.key.keyToString(), fieldName = null, stale = false), path = pendingReference.path) return@forEach } else { - throw CacheMissException(pendingReference.key.key) + throw CacheMissException(pendingReference.key.keyToString()) } } } @@ -224,7 +224,7 @@ internal class CacheBatchReader( field = it, variables = variables, parent = this, - parentKey = CacheKey("", isHashed = true), + parentKey = CacheKey(""), parentType = parentType, cacheHeaders = cacheHeaders, fieldKeyGenerator = fieldKeyGenerator, diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/DefaultApolloStore.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/DefaultApolloStore.kt index c2009e67..d438689a 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/DefaultApolloStore.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/DefaultApolloStore.kt @@ -105,7 +105,7 @@ internal class DefaultApolloStore( override fun 
normalize( executable: Executable, dataWithErrors: DataWithErrors, - rootKey: String, + rootKey: CacheKey, customScalarAdapters: CustomScalarAdapters, ): Map { return dataWithErrors.normalized( @@ -239,7 +239,7 @@ internal class DefaultApolloStore( val records = normalize( executable = fragment, dataWithErrors = dataWithErrors, - rootKey = cacheKey.key, + rootKey = cacheKey, customScalarAdapters = customScalarAdapters, ).values return cache.merge(records, cacheHeaders, recordMerger) @@ -281,7 +281,7 @@ internal class DefaultApolloStore( val records = normalize( executable = fragment, dataWithErrors = dataWithErrors, - rootKey = cacheKey.key, + rootKey = cacheKey, customScalarAdapters = customScalarAdapters, ).values.map { record -> Record( diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/Normalizer.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/Normalizer.kt index c1649544..e15cdffc 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/Normalizer.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/Normalizer.kt @@ -27,6 +27,8 @@ import com.apollographql.cache.normalized.api.MetadataGenerator import com.apollographql.cache.normalized.api.MetadataGeneratorContext import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.api.TypePolicyCacheKeyGenerator +import com.apollographql.cache.normalized.api.append +import com.apollographql.cache.normalized.api.isRootKey import com.apollographql.cache.normalized.api.withErrors /** @@ -35,7 +37,7 @@ import com.apollographql.cache.normalized.api.withErrors */ internal class Normalizer( private val variables: Executable.Variables, - private val rootKey: String, + private val rootKey: CacheKey, private val cacheKeyGenerator: CacheKeyGenerator, private val metadataGenerator: MetadataGenerator, private val fieldKeyGenerator: FieldKeyGenerator, @@ -68,7 +70,7 @@ internal class Normalizer( */ private fun buildFields( obj: DataWithErrors, - key: String, + key: CacheKey, selections: List, parentType: CompiledNamedType, ): Map { @@ -99,7 +101,7 @@ internal class Normalizer( val fieldKey = fieldKeyGenerator.getFieldKey(FieldKeyContext(parentType.name, mergedField, variables)) - val base = if (key == CacheKey.rootKey().key) { + val base = if (key.isRootKey()) { // If we're at the root level, skip `QUERY_ROOT` altogether to save a few bytes null } else { @@ -109,7 +111,7 @@ internal class Normalizer( value = entry.value, field = mergedField, type_ = mergedField.type, - path = base.append(fieldKey), + path = base?.append(fieldKey) ?: CacheKey(fieldKey), embeddedFields = embeddedFieldsProvider.getEmbeddedFields(EmbeddedFieldsContext(parentType)), ) val metadata = metadataGenerator.metadataForObject(entry.value, MetadataGeneratorContext(field = mergedField, variables)) @@ -120,23 +122,20 @@ internal class Normalizer( } /** - * - * * @param obj the json node representing the object - * @param key the key for this record + * @param cacheKey the key for this record * @param selections the selections queried on this object * @return the CacheKey if this object has a CacheKey or the new Map if the object was embedded */ private fun buildRecord( obj: DataWithErrors, - key: String, + cacheKey: CacheKey, selections: List, parentType: CompiledNamedType, ): CacheKey { - val fields = buildFields(obj, key, selections, parentType) + 
val fields = buildFields(obj, cacheKey, selections, parentType) val fieldValues = fields.mapValues { it.value.fieldValue } val metadata = fields.mapValues { it.value.metadata }.filterValues { it.isNotEmpty() } - val cacheKey = CacheKey(key, isHashed = true) val record = Record( key = cacheKey, fields = fieldValues, @@ -174,7 +173,7 @@ internal class Normalizer( value: Any?, field: CompiledField, type_: CompiledType, - path: String, + path: CacheKey, embeddedFields: List, ): Any? { /** @@ -207,10 +206,10 @@ internal class Normalizer( var key = cacheKeyGenerator.cacheKeyForObject( value as Map, CacheKeyGeneratorContext(field, variables), - )?.key + ) if (key == null) { - key = CacheKey(path).key + key = path } if (embeddedFields.contains(field.name)) { buildFields(value, key, field.selections, field.type.rawType()) @@ -257,9 +256,6 @@ internal class Normalizer( collectFields(selections, parentType, typename, state) return state.fields } - - // The receiver can be null for the root query to save some space in the cache by not storing QUERY_ROOT all over the place - private fun String?.append(next: String): String = if (this == null) next else "$this.$next" } /** @@ -267,7 +263,7 @@ internal class Normalizer( */ fun D.normalized( executable: Executable, - rootKey: String = CacheKey.rootKey().key, + rootKey: CacheKey = CacheKey.rootKey(), customScalarAdapters: CustomScalarAdapters = CustomScalarAdapters.Empty, cacheKeyGenerator: CacheKeyGenerator = TypePolicyCacheKeyGenerator, metadataGenerator: MetadataGenerator = EmptyMetadataGenerator, @@ -283,7 +279,7 @@ fun D.normalized( */ fun DataWithErrors.normalized( executable: Executable, - rootKey: String = CacheKey.rootKey().key, + rootKey: CacheKey = CacheKey.rootKey(), customScalarAdapters: CustomScalarAdapters = CustomScalarAdapters.Empty, cacheKeyGenerator: CacheKeyGenerator = TypePolicyCacheKeyGenerator, metadataGenerator: MetadataGenerator = EmptyMetadataGenerator, @@ -294,9 +290,3 @@ fun DataWithErrors.normalized( return Normalizer(variables, rootKey, cacheKeyGenerator, metadataGenerator, fieldKeyGenerator, embeddedFieldsProvider) .normalize(this, executable.rootField().selections, executable.rootField().type.rawType()) } - - -@OptIn(ExperimentalStdlibApi::class) -fun String.hashed(): String { - return hashCode().toHexString() -} diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/RecordWeigher.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/RecordWeigher.kt index ff780c4f..b5fce288 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/RecordWeigher.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/RecordWeigher.kt @@ -27,7 +27,7 @@ internal object RecordWeigher { @JvmStatic fun calculateBytes(record: Record): Int { - var size = SIZE_OF_RECORD_OVERHEAD + record.key.key.length + var size = SIZE_OF_RECORD_OVERHEAD + record.key.key.size for ((key, value) in record.fields) { size += key.length + weighField(value) } @@ -56,7 +56,7 @@ internal object RecordWeigher { } is CacheKey -> { - SIZE_OF_CACHE_KEY_OVERHEAD + field.key.commonAsUtf8ToByteArray().size + SIZE_OF_CACHE_KEY_OVERHEAD + field.key.size } is Error -> { diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/memory/MemoryCache.kt 
b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/memory/MemoryCache.kt index 7bc43088..7108df76 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/memory/MemoryCache.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/memory/MemoryCache.kt @@ -40,7 +40,7 @@ class MemoryCache( } private val lruCache = LruCache(maxSize = maxSizeBytes, expireAfterMillis = expireAfterMillis) { key, record -> - key.key.length + record.sizeInBytes + key.key.size + record.sizeInBytes } val size: Int diff --git a/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/CacheKeyResolverTest.kt b/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/CacheKeyResolverTest.kt index c180b304..9bf2d916 100644 --- a/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/CacheKeyResolverTest.kt +++ b/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/CacheKeyResolverTest.kt @@ -41,7 +41,7 @@ class CacheKeyResolverTest { field, Executable.Variables(emptyMap()), emptyMap(), - CacheKey("", isHashed = true), + CacheKey(""), "", CacheHeaders(emptyMap()), DefaultFieldKeyGenerator, diff --git a/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/MemoryCacheTest.kt b/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/MemoryCacheTest.kt index 90bd316c..2677164e 100644 --- a/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/MemoryCacheTest.kt +++ b/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/MemoryCacheTest.kt @@ -65,7 +65,7 @@ class MemoryCacheTest { val lruCache = createCache( // all records won't fit as there is timestamp that stored with each record - maxSizeBytes = 342 + maxSizeBytes = 600 ) val records = listOf(testRecord1, testRecord2, testRecord3) @@ -85,7 +85,7 @@ class MemoryCacheTest { val testRecord3 = createTestRecord("3") val lruCache = createCache( - maxSizeBytes = 800 + maxSizeBytes = 2000 ) val records = listOf(testRecord1, testRecord2, testRecord3) diff --git a/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/RecordWeigherTest.kt b/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/RecordWeigherTest.kt index 51cac0df..e327ac8c 100644 --- a/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/RecordWeigherTest.kt +++ b/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/RecordWeigherTest.kt @@ -32,7 +32,7 @@ class RecordWeigherTest { ) ) - assertTrue(record.sizeInBytes <= 284) + assertTrue(record.sizeInBytes <= 377) assertTrue(record.sizeInBytes >= 258) // JS takes less space, maybe for strings? 
} } diff --git a/normalized-cache-sqlite-incubating/api/android/normalized-cache-sqlite-incubating.api b/normalized-cache-sqlite-incubating/api/android/normalized-cache-sqlite-incubating.api index 15fccad4..25bc42b8 100644 --- a/normalized-cache-sqlite-incubating/api/android/normalized-cache-sqlite-incubating.api +++ b/normalized-cache-sqlite-incubating/api/android/normalized-cache-sqlite-incubating.api @@ -12,12 +12,12 @@ public final class com/apollographql/cache/normalized/sql/ApolloInitializer$Comp public final class com/apollographql/cache/normalized/sql/SqlNormalizedCache : com/apollographql/cache/normalized/api/NormalizedCache { public fun clearAll ()V public fun dump ()Ljava/util/Map; - public fun loadRecord-eNSUWrY (Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; + public fun loadRecord-eNSUWrY (Lokio/ByteString;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; public fun loadRecords (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Collection; public fun merge (Lcom/apollographql/cache/normalized/api/Record;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; public fun merge (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; public fun remove (Ljava/util/Collection;Z)I - public fun remove-eNSUWrY (Ljava/lang/String;Z)Z + public fun remove-eNSUWrY (Lokio/ByteString;Z)Z public fun trim (JF)J } @@ -39,21 +39,21 @@ public final class com/apollographql/cache/normalized/sql/VersionKt { } public final class com/apollographql/cache/normalized/sql/internal/fields/Field_ { - public fun (Ljava/lang/String;Ljava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;)V - public final fun component1 ()Ljava/lang/String; + public fun ([BLjava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;)V + public final fun component1 ()[B public final fun component2 ()Ljava/lang/String; public final fun component3 ()[B public final fun component4 ()[B public final fun component5 ()Ljava/lang/Long; public final fun component6 ()Ljava/lang/Long; - public final fun copy (Ljava/lang/String;Ljava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;)Lcom/apollographql/cache/normalized/sql/internal/fields/Field_; - public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/fields/Field_;Ljava/lang/String;Ljava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;ILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/fields/Field_; + public final fun copy ([BLjava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;)Lcom/apollographql/cache/normalized/sql/internal/fields/Field_; + public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/fields/Field_;[BLjava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;ILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/fields/Field_; public fun equals (Ljava/lang/Object;)Z public final fun getExpiration_date ()Ljava/lang/Long; public final fun getField_ ()Ljava/lang/String; public final fun getMetadata ()[B public final fun getReceived_date ()Ljava/lang/Long; - public final fun getRecord ()Ljava/lang/String; + public final fun getRecord ()[B public final fun getValue_ ()[B public fun hashCode ()I public fun toString ()Ljava/lang/String; @@ -75,8 +75,7 @@ public final class 
com/apollographql/cache/normalized/sql/internal/fields/Fields public final fun count ()Lapp/cash/sqldelight/Query; public final fun deleteAllRecords ()V public final fun deleteRecords (Ljava/util/Collection;)V - public final fun deleteRecordsMatching (Ljava/lang/String;)V - public final fun insertOrUpdateField (Ljava/lang/String;Ljava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;)V + public final fun insertOrUpdateField ([BLjava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;)V public final fun selectAllRecords ()Lapp/cash/sqldelight/Query; public final fun selectAllRecords (Lkotlin/jvm/functions/Function6;)Lapp/cash/sqldelight/Query; public final fun selectRecords (Ljava/util/Collection;)Lapp/cash/sqldelight/Query; diff --git a/normalized-cache-sqlite-incubating/api/jvm/normalized-cache-sqlite-incubating.api b/normalized-cache-sqlite-incubating/api/jvm/normalized-cache-sqlite-incubating.api index 27f7f3d9..69b0699f 100644 --- a/normalized-cache-sqlite-incubating/api/jvm/normalized-cache-sqlite-incubating.api +++ b/normalized-cache-sqlite-incubating/api/jvm/normalized-cache-sqlite-incubating.api @@ -1,12 +1,12 @@ public final class com/apollographql/cache/normalized/sql/SqlNormalizedCache : com/apollographql/cache/normalized/api/NormalizedCache { public fun clearAll ()V public fun dump ()Ljava/util/Map; - public fun loadRecord-eNSUWrY (Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; + public fun loadRecord-eNSUWrY (Lokio/ByteString;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; public fun loadRecords (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Collection; public fun merge (Lcom/apollographql/cache/normalized/api/Record;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; public fun merge (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; public fun remove (Ljava/util/Collection;Z)I - public fun remove-eNSUWrY (Ljava/lang/String;Z)Z + public fun remove-eNSUWrY (Lokio/ByteString;Z)Z public fun trim (JF)J } @@ -24,21 +24,21 @@ public final class com/apollographql/cache/normalized/sql/VersionKt { } public final class com/apollographql/cache/normalized/sql/internal/fields/Field_ { - public fun (Ljava/lang/String;Ljava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;)V - public final fun component1 ()Ljava/lang/String; + public fun ([BLjava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;)V + public final fun component1 ()[B public final fun component2 ()Ljava/lang/String; public final fun component3 ()[B public final fun component4 ()[B public final fun component5 ()Ljava/lang/Long; public final fun component6 ()Ljava/lang/Long; - public final fun copy (Ljava/lang/String;Ljava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;)Lcom/apollographql/cache/normalized/sql/internal/fields/Field_; - public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/fields/Field_;Ljava/lang/String;Ljava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;ILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/fields/Field_; + public final fun copy ([BLjava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;)Lcom/apollographql/cache/normalized/sql/internal/fields/Field_; + public static synthetic fun copy$default 
(Lcom/apollographql/cache/normalized/sql/internal/fields/Field_;[BLjava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;ILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/fields/Field_; public fun equals (Ljava/lang/Object;)Z public final fun getExpiration_date ()Ljava/lang/Long; public final fun getField_ ()Ljava/lang/String; public final fun getMetadata ()[B public final fun getReceived_date ()Ljava/lang/Long; - public final fun getRecord ()Ljava/lang/String; + public final fun getRecord ()[B public final fun getValue_ ()[B public fun hashCode ()I public fun toString ()Ljava/lang/String; @@ -60,8 +60,7 @@ public final class com/apollographql/cache/normalized/sql/internal/fields/Fields public final fun count ()Lapp/cash/sqldelight/Query; public final fun deleteAllRecords ()V public final fun deleteRecords (Ljava/util/Collection;)V - public final fun deleteRecordsMatching (Ljava/lang/String;)V - public final fun insertOrUpdateField (Ljava/lang/String;Ljava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;)V + public final fun insertOrUpdateField ([BLjava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;)V public final fun selectAllRecords ()Lapp/cash/sqldelight/Query; public final fun selectAllRecords (Lkotlin/jvm/functions/Function6;)Lapp/cash/sqldelight/Query; public final fun selectRecords (Ljava/util/Collection;)Lapp/cash/sqldelight/Query; diff --git a/normalized-cache-sqlite-incubating/api/normalized-cache-sqlite-incubating.klib.api b/normalized-cache-sqlite-incubating/api/normalized-cache-sqlite-incubating.klib.api index e3843caa..8b072d78 100644 --- a/normalized-cache-sqlite-incubating/api/normalized-cache-sqlite-incubating.klib.api +++ b/normalized-cache-sqlite-incubating/api/normalized-cache-sqlite-incubating.klib.api @@ -19,7 +19,7 @@ abstract interface com.apollographql.cache.normalized.sql.internal.fields/Fields } final class com.apollographql.cache.normalized.sql.internal.fields/Field_ { // com.apollographql.cache.normalized.sql.internal.fields/Field_|null[0] - constructor (kotlin/String, kotlin/String, kotlin/ByteArray?, kotlin/ByteArray?, kotlin/Long?, kotlin/Long?) // com.apollographql.cache.normalized.sql.internal.fields/Field_.|(kotlin.String;kotlin.String;kotlin.ByteArray?;kotlin.ByteArray?;kotlin.Long?;kotlin.Long?){}[0] + constructor (kotlin/ByteArray, kotlin/String, kotlin/ByteArray?, kotlin/ByteArray?, kotlin/Long?, kotlin/Long?) // com.apollographql.cache.normalized.sql.internal.fields/Field_.|(kotlin.ByteArray;kotlin.String;kotlin.ByteArray?;kotlin.ByteArray?;kotlin.Long?;kotlin.Long?){}[0] final val expiration_date // com.apollographql.cache.normalized.sql.internal.fields/Field_.expiration_date|{}expiration_date[0] final fun (): kotlin/Long? // com.apollographql.cache.normalized.sql.internal.fields/Field_.expiration_date.|(){}[0] @@ -30,17 +30,17 @@ final class com.apollographql.cache.normalized.sql.internal.fields/Field_ { // c final val received_date // com.apollographql.cache.normalized.sql.internal.fields/Field_.received_date|{}received_date[0] final fun (): kotlin/Long? 
// com.apollographql.cache.normalized.sql.internal.fields/Field_.received_date.|(){}[0] final val record // com.apollographql.cache.normalized.sql.internal.fields/Field_.record|{}record[0] - final fun (): kotlin/String // com.apollographql.cache.normalized.sql.internal.fields/Field_.record.|(){}[0] + final fun (): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.fields/Field_.record.|(){}[0] final val value_ // com.apollographql.cache.normalized.sql.internal.fields/Field_.value_|{}value_[0] final fun (): kotlin/ByteArray? // com.apollographql.cache.normalized.sql.internal.fields/Field_.value_.|(){}[0] - final fun component1(): kotlin/String // com.apollographql.cache.normalized.sql.internal.fields/Field_.component1|component1(){}[0] + final fun component1(): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.fields/Field_.component1|component1(){}[0] final fun component2(): kotlin/String // com.apollographql.cache.normalized.sql.internal.fields/Field_.component2|component2(){}[0] final fun component3(): kotlin/ByteArray? // com.apollographql.cache.normalized.sql.internal.fields/Field_.component3|component3(){}[0] final fun component4(): kotlin/ByteArray? // com.apollographql.cache.normalized.sql.internal.fields/Field_.component4|component4(){}[0] final fun component5(): kotlin/Long? // com.apollographql.cache.normalized.sql.internal.fields/Field_.component5|component5(){}[0] final fun component6(): kotlin/Long? // com.apollographql.cache.normalized.sql.internal.fields/Field_.component6|component6(){}[0] - final fun copy(kotlin/String = ..., kotlin/String = ..., kotlin/ByteArray? = ..., kotlin/ByteArray? = ..., kotlin/Long? = ..., kotlin/Long? = ...): com.apollographql.cache.normalized.sql.internal.fields/Field_ // com.apollographql.cache.normalized.sql.internal.fields/Field_.copy|copy(kotlin.String;kotlin.String;kotlin.ByteArray?;kotlin.ByteArray?;kotlin.Long?;kotlin.Long?){}[0] + final fun copy(kotlin/ByteArray = ..., kotlin/String = ..., kotlin/ByteArray? = ..., kotlin/ByteArray? = ..., kotlin/Long? = ..., kotlin/Long? 
= ...): com.apollographql.cache.normalized.sql.internal.fields/Field_ // com.apollographql.cache.normalized.sql.internal.fields/Field_.copy|copy(kotlin.ByteArray;kotlin.String;kotlin.ByteArray?;kotlin.ByteArray?;kotlin.Long?;kotlin.Long?){}[0] final fun equals(kotlin/Any?): kotlin/Boolean // com.apollographql.cache.normalized.sql.internal.fields/Field_.equals|equals(kotlin.Any?){}[0] final fun hashCode(): kotlin/Int // com.apollographql.cache.normalized.sql.internal.fields/Field_.hashCode|hashCode(){}[0] final fun toString(): kotlin/String // com.apollographql.cache.normalized.sql.internal.fields/Field_.toString|toString(){}[0] @@ -49,16 +49,15 @@ final class com.apollographql.cache.normalized.sql.internal.fields/Field_ { // c final class com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries : app.cash.sqldelight/TransacterImpl { // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries|null[0] constructor (app.cash.sqldelight.db/SqlDriver) // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.|(app.cash.sqldelight.db.SqlDriver){}[0] - final fun <#A1: kotlin/Any> selectAllRecords(kotlin/Function6): app.cash.sqldelight/Query<#A1> // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.selectAllRecords|selectAllRecords(kotlin.Function6){0§}[0] - final fun <#A1: kotlin/Any> selectRecords(kotlin.collections/Collection, kotlin/Function6): app.cash.sqldelight/Query<#A1> // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.selectRecords|selectRecords(kotlin.collections.Collection;kotlin.Function6){0§}[0] + final fun <#A1: kotlin/Any> selectAllRecords(kotlin/Function6): app.cash.sqldelight/Query<#A1> // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.selectAllRecords|selectAllRecords(kotlin.Function6){0§}[0] + final fun <#A1: kotlin/Any> selectRecords(kotlin.collections/Collection, kotlin/Function6): app.cash.sqldelight/Query<#A1> // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.selectRecords|selectRecords(kotlin.collections.Collection;kotlin.Function6){0§}[0] final fun changes(): app.cash.sqldelight/ExecutableQuery // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.changes|changes(){}[0] final fun count(): app.cash.sqldelight/Query // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.count|count(){}[0] final fun deleteAllRecords() // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.deleteAllRecords|deleteAllRecords(){}[0] - final fun deleteRecords(kotlin.collections/Collection) // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.deleteRecords|deleteRecords(kotlin.collections.Collection){}[0] - final fun deleteRecordsMatching(kotlin/String) // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.deleteRecordsMatching|deleteRecordsMatching(kotlin.String){}[0] - final fun insertOrUpdateField(kotlin/String, kotlin/String, kotlin/ByteArray?, kotlin/ByteArray?, kotlin/Long?, kotlin/Long?) 
// com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.insertOrUpdateField|insertOrUpdateField(kotlin.String;kotlin.String;kotlin.ByteArray?;kotlin.ByteArray?;kotlin.Long?;kotlin.Long?){}[0] + final fun deleteRecords(kotlin.collections/Collection) // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.deleteRecords|deleteRecords(kotlin.collections.Collection){}[0] + final fun insertOrUpdateField(kotlin/ByteArray, kotlin/String, kotlin/ByteArray?, kotlin/ByteArray?, kotlin/Long?, kotlin/Long?) // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.insertOrUpdateField|insertOrUpdateField(kotlin.ByteArray;kotlin.String;kotlin.ByteArray?;kotlin.ByteArray?;kotlin.Long?;kotlin.Long?){}[0] final fun selectAllRecords(): app.cash.sqldelight/Query // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.selectAllRecords|selectAllRecords(){}[0] - final fun selectRecords(kotlin.collections/Collection): app.cash.sqldelight/Query // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.selectRecords|selectRecords(kotlin.collections.Collection){}[0] + final fun selectRecords(kotlin.collections/Collection): app.cash.sqldelight/Query // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.selectRecords|selectRecords(kotlin.collections.Collection){}[0] final fun trimByReceivedDate(kotlin/Long) // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.trimByReceivedDate|trimByReceivedDate(kotlin.Long){}[0] } diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCache.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCache.kt index 19f277ae..ace4ebe4 100644 --- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCache.kt +++ b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCache.kt @@ -11,6 +11,7 @@ import com.apollographql.cache.normalized.api.RecordMerger import com.apollographql.cache.normalized.api.RecordMergerContext import com.apollographql.cache.normalized.api.withDates import com.apollographql.cache.normalized.sql.internal.RecordDatabase +import okio.ByteString import kotlin.reflect.KClass class SqlNormalizedCache internal constructor( @@ -69,9 +70,13 @@ class SqlNormalizedCache internal constructor( return mapOf(this::class to recordDatabase.selectAllRecords().associateBy { it.key }) } - private fun getReferencedKeysRecursively(keys: Collection, visited: MutableSet = mutableSetOf()): Set { + private fun getReferencedKeysRecursively( + keys: Collection, + visited: MutableSet = mutableSetOf(), + ): Set { if (keys.isEmpty()) return emptySet() - val referencedKeys = recordDatabase.selectRecords(keys - visited).flatMap { it.referencedFields() }.map { it.key }.toSet() + val referencedKeys = + recordDatabase.selectRecords((keys - visited).map { it.toByteArray() }).flatMap { it.referencedFields() }.map { it.key }.toSet() visited += keys return referencedKeys + getReferencedKeysRecursively(referencedKeys, visited) } @@ -79,14 +84,14 @@ class SqlNormalizedCache internal constructor( /** * Assumes an enclosing transaction */ - private fun internalDeleteRecords(keys: Collection, cascade: Boolean): Int { + private fun internalDeleteRecords(keys: Collection, cascade: Boolean): Int { val referencedKeys = if (cascade) { getReferencedKeysRecursively(keys) } 
else { emptySet() } return (keys + referencedKeys).chunked(999).sumOf { chunkedKeys -> - recordDatabase.deleteRecords(chunkedKeys) + recordDatabase.deleteRecords(chunkedKeys.map { it.toByteArray() }) recordDatabase.changes().toInt() } } @@ -138,7 +143,7 @@ class SqlNormalizedCache internal constructor( */ private fun selectRecords(keys: Collection): List { return keys - .map { it.key } + .map { it.key.toByteArray() } .chunked(999).flatMap { chunkedKeys -> recordDatabase.selectRecords(chunkedKeys) } diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/ApolloJsonElementSerializer.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/ApolloJsonElementSerializer.kt index d110c211..dec8d241 100644 --- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/ApolloJsonElementSerializer.kt +++ b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/ApolloJsonElementSerializer.kt @@ -1,8 +1,8 @@ package com.apollographql.cache.normalized.sql.internal -import com.apollographql.apollo.api.json.ApolloJsonElement import com.apollographql.apollo.api.Error import com.apollographql.apollo.api.Error.Builder +import com.apollographql.apollo.api.json.ApolloJsonElement import com.apollographql.apollo.api.json.JsonNumber import com.apollographql.cache.normalized.api.CacheKey import okio.Buffer @@ -107,7 +107,7 @@ internal object ApolloJsonElementSerializer { is CacheKey -> { buffer.writeByte(CACHE_KEY) - buffer.writeString(value.key) + buffer.write(value.key) } is List<*> -> { @@ -174,7 +174,7 @@ internal object ApolloJsonElementSerializer { BOOLEAN_TRUE -> true BOOLEAN_FALSE -> false CACHE_KEY -> { - CacheKey(readString(), isHashed = true) + CacheKey(readByteString(CacheKey.HASH_SIZE_BYTES.toLong())) } LIST -> { diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordDatabase.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordDatabase.kt index fd5e5341..db40bf76 100644 --- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordDatabase.kt +++ b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordDatabase.kt @@ -12,6 +12,8 @@ import com.apollographql.cache.normalized.api.receivedDate import com.apollographql.cache.normalized.sql.internal.fields.Field_ import com.apollographql.cache.normalized.sql.internal.fields.FieldsDatabase import com.apollographql.cache.normalized.sql.internal.fields.FieldsQueries +import okio.ByteString +import okio.ByteString.Companion.toByteString internal class RecordDatabase(private val driver: SqlDriver) { private val fieldsQueries: FieldsQueries = FieldsDatabase(driver).fieldsQueries @@ -25,17 +27,19 @@ internal class RecordDatabase(private val driver: SqlDriver) { /** * @param keys the keys of the records to select, size must be <= 999 */ - fun selectRecords(keys: Collection): List { - val fieldsByRecordKey: Map> = fieldsQueries.selectRecords(keys).executeAsList().groupBy { it.record } + fun selectRecords(keys: Collection): List { + val fieldsByRecordKey: Map> = + fieldsQueries.selectRecords(keys).executeAsList().groupBy { it.record.toByteString() } return fieldsByRecordKey.toRecords() } fun selectAllRecords(): List { 
- val fieldsByRecordKey: Map> = fieldsQueries.selectAllRecords().executeAsList().groupBy { it.record } + val fieldsByRecordKey: Map> = + fieldsQueries.selectAllRecords().executeAsList().groupBy { it.record.toByteString() } return fieldsByRecordKey.toRecords() } - private fun Map>.toRecords(): List = + private fun Map>.toRecords(): List = mapValues { (key, fieldList) -> val fieldValues: Map = fieldList.associate { field -> field.field_ to ApolloJsonElementSerializer.deserialize(field.value_) @@ -59,7 +63,7 @@ internal class RecordDatabase(private val driver: SqlDriver) { } }.filterValues { it.isNotEmpty() } Record( - key = CacheKey(key, isHashed = true), + key = CacheKey(key), fields = fieldValues, metadata = metadata, ) @@ -68,7 +72,7 @@ internal class RecordDatabase(private val driver: SqlDriver) { fun insertOrUpdateRecord(record: Record) { for ((field, value) in record.fields) { insertOrUpdateField( - record = record.key.key, + record = record.key.key.toByteArray(), field = field, value = value, metadata = record.metadata[field], @@ -79,7 +83,7 @@ internal class RecordDatabase(private val driver: SqlDriver) { } private fun insertOrUpdateField( - record: String, + record: ByteArray, field: String, value: ApolloJsonElement, metadata: Map?, @@ -106,14 +110,10 @@ internal class RecordDatabase(private val driver: SqlDriver) { /** * @param keys the keys of the records to delete, size must be <= 999 */ - fun deleteRecords(keys: Collection) { + fun deleteRecords(keys: Collection) { fieldsQueries.deleteRecords(keys) } - fun deleteRecordsMatching(pattern: String) { - fieldsQueries.deleteRecordsMatching(pattern) - } - fun deleteAllRecords() { fieldsQueries.deleteAllRecords() } diff --git a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/apollographql/cache/normalized/sql/internal/fields/fields.sq b/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/apollographql/cache/normalized/sql/internal/fields/fields.sq index f33bcd5e..d65e3962 100644 --- a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/apollographql/cache/normalized/sql/internal/fields/fields.sq +++ b/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/apollographql/cache/normalized/sql/internal/fields/fields.sq @@ -1,5 +1,5 @@ CREATE TABLE field ( - record TEXT NOT NULL, + record BLOB NOT NULL, field TEXT NOT NULL, value BLOB, metadata BLOB, @@ -21,9 +21,6 @@ INSERT INTO field (record, field, value, metadata, received_date, expiration_dat deleteRecords: DELETE FROM field WHERE record IN ?; -deleteRecordsMatching: -DELETE FROM field WHERE record LIKE ? 
ESCAPE '\'; - deleteAllRecords: DELETE FROM field; diff --git a/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCacheTest.kt b/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCacheTest.kt index 63acf2e0..c690fc14 100644 --- a/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCacheTest.kt +++ b/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCacheTest.kt @@ -13,6 +13,7 @@ import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.DefaultRecordMerger import com.apollographql.cache.normalized.api.NormalizedCache import com.apollographql.cache.normalized.api.Record +import com.apollographql.cache.normalized.api.fieldKey import com.apollographql.cache.normalized.sql.internal.RecordDatabase import kotlin.test.BeforeTest import kotlin.test.Test @@ -150,7 +151,7 @@ class SqlNormalizedCacheTest { ) val record = cache.loadRecord(STANDARD_KEY, CacheHeaders.NONE) assertNotNull(record) - assertEquals(expected = setOf("${STANDARD_KEY.key}.fieldKey", "${STANDARD_KEY.key}.newFieldKey"), actual = changedKeys) + assertEquals(expected = setOf(STANDARD_KEY.fieldKey("fieldKey"), STANDARD_KEY.fieldKey("newFieldKey")), actual = changedKeys) assertEquals(expected = "valueUpdated", actual = record.fields["fieldKey"]) assertEquals(expected = true, actual = record.fields["newFieldKey"]) } diff --git a/tests/defer/src/commonTest/kotlin/test/DeferNormalizedCacheTest.kt b/tests/defer/src/commonTest/kotlin/test/DeferNormalizedCacheTest.kt index 355b6bc5..f0a3b353 100644 --- a/tests/defer/src/commonTest/kotlin/test/DeferNormalizedCacheTest.kt +++ b/tests/defer/src/commonTest/kotlin/test/DeferNormalizedCacheTest.kt @@ -16,6 +16,7 @@ import com.apollographql.cache.normalized.ApolloStore import com.apollographql.cache.normalized.FetchPolicy import com.apollographql.cache.normalized.api.CacheHeaders import com.apollographql.cache.normalized.api.CacheKey +import com.apollographql.cache.normalized.api.append import com.apollographql.cache.normalized.apolloStore import com.apollographql.cache.normalized.fetchPolicy import com.apollographql.cache.normalized.memory.MemoryCacheFactory @@ -453,7 +454,7 @@ class DeferNormalizedCacheTest { val cacheExceptionResponse = actual.last() assertIs(networkExceptionResponse.exception) assertIs(cacheExceptionResponse.exception) - val key = CacheKey((CacheKey("computers.0").key + ".screen")).key + val key = CacheKey("computers").append("0", "screen").keyToString() assertEquals("Object '$key' has no field named 'isColor'", cacheExceptionResponse.exception!!.message) } @@ -541,7 +542,7 @@ class DeferNormalizedCacheTest { val multipartBody = mockServer.enqueueMultipart("application/json") multipartBody.enqueuePart(jsonList[0].encodeUtf8(), false) val recordFields = apolloClient.query(SimpleDeferQuery()).fetchPolicy(FetchPolicy.NetworkOnly).toFlow().map { - apolloClient.apolloStore.accessCache { it.loadRecord(CacheKey("computers.0"), CacheHeaders.NONE)!!.fields }.also { + apolloClient.apolloStore.accessCache { it.loadRecord(CacheKey("computers").append("0"), CacheHeaders.NONE)!!.fields }.also { multipartBody.enqueuePart(jsonList[1].encodeUtf8(), true) } }.toList() diff --git a/tests/garbage-collection/src/commonTest/kotlin/DanglingReferencesTest.kt 
b/tests/garbage-collection/src/commonTest/kotlin/DanglingReferencesTest.kt index 58542b79..32aa0c78 100644 --- a/tests/garbage-collection/src/commonTest/kotlin/DanglingReferencesTest.kt +++ b/tests/garbage-collection/src/commonTest/kotlin/DanglingReferencesTest.kt @@ -6,6 +6,7 @@ import com.apollographql.cache.normalized.ApolloStore import com.apollographql.cache.normalized.FetchPolicy import com.apollographql.cache.normalized.allRecords import com.apollographql.cache.normalized.api.CacheKey +import com.apollographql.cache.normalized.api.append import com.apollographql.cache.normalized.api.fieldKey import com.apollographql.cache.normalized.fetchPolicy import com.apollographql.cache.normalized.memory.MemoryCacheFactory @@ -98,24 +99,24 @@ class DanglingReferencesTest { val removedFieldsAndRecords = store.removeDanglingReferences() assertEquals( setOf( - CacheKey(CacheKey("metaProjects.0.0").fieldKey("type")).fieldKey("owners"), - CacheKey("metaProjects.0.0").fieldKey("type"), + CacheKey("metaProjects").append("0", "0", "type").fieldKey("owners"), + CacheKey("metaProjects").append("0", "0").fieldKey("type"), CacheKey("QUERY_ROOT").fieldKey("metaProjects"), ), removedFieldsAndRecords.removedFields ) assertEquals( setOf( - CacheKey(CacheKey("metaProjects.0.0").fieldKey("type")), - CacheKey("metaProjects.0.0"), + CacheKey("metaProjects").append("0", "0", "type"), + CacheKey("metaProjects").append("0", "0"), CacheKey("QUERY_ROOT"), ), removedFieldsAndRecords.removedRecords ) val allRecords = store.accessCache { it.allRecords() } assertFalse(allRecords.containsKey(CacheKey("QUERY_ROOT"))) - assertFalse(allRecords.containsKey(CacheKey("metaProjects.0.0"))) - assertFalse(allRecords.containsKey(CacheKey(CacheKey("metaProjects.0.0").fieldKey("type")))) + assertFalse(allRecords.containsKey(CacheKey("metaProjects").append("0", "0"))) + assertFalse(allRecords.containsKey(CacheKey("metaProjects").append("0", "0", "type"))) } } diff --git a/tests/garbage-collection/src/commonTest/kotlin/GarbageCollectTest.kt b/tests/garbage-collection/src/commonTest/kotlin/GarbageCollectTest.kt index b563bf79..0c1fe1ed 100644 --- a/tests/garbage-collection/src/commonTest/kotlin/GarbageCollectTest.kt +++ b/tests/garbage-collection/src/commonTest/kotlin/GarbageCollectTest.kt @@ -7,6 +7,7 @@ import com.apollographql.cache.normalized.FetchPolicy import com.apollographql.cache.normalized.allRecords import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.SchemaCoordinatesMaxAgeProvider +import com.apollographql.cache.normalized.api.append import com.apollographql.cache.normalized.api.fieldKey import com.apollographql.cache.normalized.cacheHeaders import com.apollographql.cache.normalized.fetchPolicy @@ -60,35 +61,35 @@ class GarbageCollectTest { val garbageCollectResult = store.garbageCollect(maxAgeProvider) assertEquals( setOf( - CacheKey(CacheKey("metaProjects.0.0").fieldKey("type")).fieldKey("owners"), - CacheKey(CacheKey("metaProjects.0.1").fieldKey("type")).fieldKey("owners"), - CacheKey(CacheKey("metaProjects.1.0").fieldKey("type")).fieldKey("owners"), + CacheKey("metaProjects").append("0", "0", "type").fieldKey("owners"), + CacheKey("metaProjects").append("0", "1", "type").fieldKey("owners"), + CacheKey("metaProjects").append("1", "0", "type").fieldKey("owners"), ), garbageCollectResult.removedStaleFields.removedFields ) assertEquals( setOf( - CacheKey(CacheKey("metaProjects.0.0").fieldKey("type")), - CacheKey(CacheKey("metaProjects.0.1").fieldKey("type")), - 
CacheKey(CacheKey("metaProjects.1.0").fieldKey("type")), + CacheKey("metaProjects").append("0", "0", "type"), + CacheKey("metaProjects").append("0", "1", "type"), + CacheKey("metaProjects").append("1", "0", "type"), ), garbageCollectResult.removedStaleFields.removedRecords ) assertEquals( setOf( - CacheKey("metaProjects.0.0").fieldKey("type"), - CacheKey("metaProjects.0.1").fieldKey("type"), - CacheKey("metaProjects.1.0").fieldKey("type"), + CacheKey("metaProjects").append("0", "0").fieldKey("type"), + CacheKey("metaProjects").append("0", "1").fieldKey("type"), + CacheKey("metaProjects").append("1", "0").fieldKey("type"), CacheKey("QUERY_ROOT").fieldKey("metaProjects"), ), garbageCollectResult.removedDanglingReferences.removedFields ) assertEquals( setOf( - CacheKey("metaProjects.0.0"), - CacheKey("metaProjects.0.1"), - CacheKey("metaProjects.1.0"), + CacheKey("metaProjects").append("0", "0"), + CacheKey("metaProjects").append("0", "1"), + CacheKey("metaProjects").append("1", "0"), CacheKey("QUERY_ROOT"), ), garbageCollectResult.removedDanglingReferences.removedRecords diff --git a/tests/garbage-collection/src/commonTest/kotlin/StaleFieldsTest.kt b/tests/garbage-collection/src/commonTest/kotlin/StaleFieldsTest.kt index 2059531b..88649b73 100644 --- a/tests/garbage-collection/src/commonTest/kotlin/StaleFieldsTest.kt +++ b/tests/garbage-collection/src/commonTest/kotlin/StaleFieldsTest.kt @@ -11,6 +11,7 @@ import com.apollographql.cache.normalized.api.CacheHeaders import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.GlobalMaxAgeProvider import com.apollographql.cache.normalized.api.SchemaCoordinatesMaxAgeProvider +import com.apollographql.cache.normalized.api.append import com.apollographql.cache.normalized.api.fieldKey import com.apollographql.cache.normalized.cacheHeaders import com.apollographql.cache.normalized.fetchPolicy @@ -132,10 +133,10 @@ class StaleFieldsTest { .execute() var allRecords = store.accessCache { it.allRecords() } - assertTrue(allRecords[CacheKey("projects.0")]!!.fields.containsKey("velocity")) - assertTrue(allRecords[CacheKey("projects.0")]!!.fields.containsKey("isUrgent")) - assertTrue(allRecords[CacheKey("projects.1")]!!.fields.containsKey("velocity")) - assertTrue(allRecords[CacheKey("projects.1")]!!.fields.containsKey("isUrgent")) + assertTrue(allRecords[CacheKey("projects").append("0")]!!.fields.containsKey("velocity")) + assertTrue(allRecords[CacheKey("projects").append("0")]!!.fields.containsKey("isUrgent")) + assertTrue(allRecords[CacheKey("projects").append("1")]!!.fields.containsKey("velocity")) + assertTrue(allRecords[CacheKey("projects").append("1")]!!.fields.containsKey("isUrgent")) val maxAgeProvider = SchemaCoordinatesMaxAgeProvider( Cache.maxAges, @@ -145,18 +146,18 @@ class StaleFieldsTest { // Project.velocity has a max age of 60 seconds, so they should be removed / Project.isUrgent has a max age of 90 seconds, so they should be kept assertEquals( setOf( - CacheKey("projects.0").fieldKey("velocity"), - CacheKey("projects.1").fieldKey("velocity"), + CacheKey("projects").append("0").fieldKey("velocity"), + CacheKey("projects").append("1").fieldKey("velocity"), ), removedFieldsAndRecords.removedFields ) assertEquals( emptySet(), removedFieldsAndRecords.removedRecords ) allRecords = store.accessCache { it.allRecords() } - assertFalse(allRecords[CacheKey("projects.0")]!!.fields.containsKey("velocity")) - assertTrue(allRecords[CacheKey("projects.0")]!!.fields.containsKey("isUrgent")) - 
assertFalse(allRecords[CacheKey("projects.1")]!!.fields.containsKey("velocity")) - assertTrue(allRecords[CacheKey("projects.1")]!!.fields.containsKey("isUrgent")) + assertFalse(allRecords[CacheKey("projects").append("0")]!!.fields.containsKey("velocity")) + assertTrue(allRecords[CacheKey("projects").append("0")]!!.fields.containsKey("isUrgent")) + assertFalse(allRecords[CacheKey("projects").append("1")]!!.fields.containsKey("velocity")) + assertTrue(allRecords[CacheKey("projects").append("1")]!!.fields.containsKey("isUrgent")) mockServer.enqueueString(PROJECT_LIST_RESPONSE) apolloClient.query(ProjectListQuery()) @@ -167,21 +168,21 @@ class StaleFieldsTest { // Project.velocity and Project.isUrgent should be removed, their records being empty they should be removed assertEquals( setOf( - CacheKey("projects.0").fieldKey("velocity"), - CacheKey("projects.0").fieldKey("isUrgent"), - CacheKey("projects.1").fieldKey("velocity"), - CacheKey("projects.1").fieldKey("isUrgent"), + CacheKey("projects").append("0").fieldKey("velocity"), + CacheKey("projects").append("0").fieldKey("isUrgent"), + CacheKey("projects").append("1").fieldKey("velocity"), + CacheKey("projects").append("1").fieldKey("isUrgent"), ), removedFieldsAndRecords.removedFields ) assertEquals( setOf( - CacheKey("projects.0"), - CacheKey("projects.1"), + CacheKey("projects").append("0"), + CacheKey("projects").append("1"), ), removedFieldsAndRecords.removedRecords ) allRecords = store.accessCache { it.allRecords() } - assertFalse(allRecords.containsKey(CacheKey("projects.0"))) - assertFalse(allRecords.containsKey(CacheKey("projects.1"))) + assertFalse(allRecords.containsKey(CacheKey("projects").append("0"))) + assertFalse(allRecords.containsKey(CacheKey("projects").append("1"))) } } diff --git a/tests/normalized-cache/src/commonTest/kotlin/FetchPolicyTest.kt b/tests/normalized-cache/src/commonTest/kotlin/FetchPolicyTest.kt index 86672212..dda69db3 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/FetchPolicyTest.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/FetchPolicyTest.kt @@ -26,6 +26,7 @@ import com.apollographql.cache.normalized.CacheFirstInterceptor import com.apollographql.cache.normalized.CacheOnlyInterceptor import com.apollographql.cache.normalized.FetchPolicy import com.apollographql.cache.normalized.api.CacheKey +import com.apollographql.cache.normalized.api.fieldKey import com.apollographql.cache.normalized.fetchPolicy import com.apollographql.cache.normalized.isFromCache import com.apollographql.cache.normalized.memory.MemoryCacheFactory @@ -603,7 +604,7 @@ class FetchPolicyTest { ) } ) - store.publish(setOf("${CacheKey.rootKey().key}.hero")) + store.publish(setOf(CacheKey.rootKey().fieldKey("hero"))) /** * This time the watcher should do a network request diff --git a/tests/normalized-cache/src/commonTest/kotlin/NormalizerTest.kt b/tests/normalized-cache/src/commonTest/kotlin/NormalizerTest.kt index 5b0ef87b..b46a23c6 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/NormalizerTest.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/NormalizerTest.kt @@ -8,7 +8,7 @@ import com.apollographql.cache.normalized.api.DefaultRecordMerger import com.apollographql.cache.normalized.api.IdCacheKeyGenerator import com.apollographql.cache.normalized.api.NormalizedCache import com.apollographql.cache.normalized.api.Record -import com.apollographql.cache.normalized.api.fieldKey +import com.apollographql.cache.normalized.api.append import com.apollographql.cache.normalized.internal.normalized import 
com.apollographql.cache.normalized.memory.MemoryCacheFactory import httpcache.AllPlanetsQuery @@ -106,13 +106,13 @@ class NormalizerTest { assertEquals(heroRecord!!["name"], "R2-D2") assertEquals( listOf( - CacheKey(CacheKey(TEST_FIELD_KEY_JEDI).fieldKey("friends.0")), - CacheKey(CacheKey(TEST_FIELD_KEY_JEDI).fieldKey("friends.1")), - CacheKey(CacheKey(TEST_FIELD_KEY_JEDI).fieldKey("friends.2")), + CacheKey(TEST_FIELD_KEY_JEDI).append("friends", "0"), + CacheKey(TEST_FIELD_KEY_JEDI).append("friends", "1"), + CacheKey(TEST_FIELD_KEY_JEDI).append("friends", "2"), ), heroRecord["friends"] ) - val luke = records.get(CacheKey(CacheKey(TEST_FIELD_KEY_JEDI).fieldKey("friends.0"))) + val luke = records.get(CacheKey(TEST_FIELD_KEY_JEDI).append("friends", "0")) assertEquals(luke!!["name"], "Luke Skywalker") } @@ -148,13 +148,13 @@ class NormalizerTest { assertEquals(heroRecord!!["name"], "R2-D2") assertEquals( listOf( - CacheKey("${CacheKey("Character:2001").key}.friends.0"), - CacheKey("${CacheKey("Character:2001").key}.friends.1"), - CacheKey("${CacheKey("Character:2001").key}.friends.2") + CacheKey("Character:2001").append("friends", "0"), + CacheKey("Character:2001").append("friends", "1"), + CacheKey("Character:2001").append("friends", "2") ), heroRecord["friends"] ) - val luke = records.get(CacheKey("${CacheKey("Character:2001").key}.friends.0")) + val luke = records.get(CacheKey("Character:2001").append("friends", "0")) assertEquals(luke!!["name"], "Luke Skywalker") } @@ -207,7 +207,7 @@ class NormalizerTest { @Throws(Exception::class) fun testHeroParentTypeDependentFieldDroid() { val records = records(HeroParentTypeDependentFieldQuery(Episode.JEDI), "HeroParentTypeDependentFieldDroidResponse.json") - val lukeRecord = records.get(CacheKey(CacheKey(TEST_FIELD_KEY_JEDI).fieldKey("friends.0"))) + val lukeRecord = records.get(CacheKey(TEST_FIELD_KEY_JEDI).append("friends", "0")) assertEquals(lukeRecord!!["name"], "Luke Skywalker") assertEquals(lukeRecord["height({\"unit\":\"METER\"})"], 1.72) @@ -215,21 +215,21 @@ class NormalizerTest { val friends = records[CacheKey(TEST_FIELD_KEY_JEDI)]!!["friends"] assertIs>(friends) - assertEquals(friends[0], CacheKey(CacheKey(TEST_FIELD_KEY_JEDI).fieldKey("friends.0"))) - assertEquals(friends[1], CacheKey(CacheKey(TEST_FIELD_KEY_JEDI).fieldKey("friends.1"))) - assertEquals(friends[2], CacheKey(CacheKey(TEST_FIELD_KEY_JEDI).fieldKey("friends.2"))) + assertEquals(friends[0], CacheKey(TEST_FIELD_KEY_JEDI).append("friends", "0")) + assertEquals(friends[1], CacheKey(TEST_FIELD_KEY_JEDI).append("friends", "1")) + assertEquals(friends[2], CacheKey(TEST_FIELD_KEY_JEDI).append("friends", "2")) } @Test fun list_of_objects_with_null_object() { val records = records(AllPlanetsQuery(), "AllPlanetsListOfObjectWithNullObject.json") - val fieldKey = CacheKey("allPlanets({\"first\":300})").key + val fieldKey = CacheKey("allPlanets({\"first\":300})") - var record: Record? = records[CacheKey("$fieldKey.planets.0")] + var record: Record? 
= records[fieldKey.append("planets", "0")] assertTrue(record?.get("filmConnection") == null) - record = records.get(CacheKey(CacheKey("$fieldKey.planets.0").fieldKey("filmConnection"))) + record = records.get(fieldKey.append("planets", "0", "filmConnection")) assertTrue(record == null) - record = records.get(CacheKey(CacheKey("$fieldKey.planets.1").fieldKey("filmConnection"))) + record = records.get(fieldKey.append("planets", "1", "filmConnection")) assertTrue(record != null) } @@ -239,7 +239,7 @@ class NormalizerTest { fun testHeroParentTypeDependentFieldHuman() { val records = records(HeroParentTypeDependentFieldQuery(Episode.EMPIRE), "HeroParentTypeDependentFieldHumanResponse.json") - val lukeRecord = records.get(CacheKey(CacheKey(TEST_FIELD_KEY_EMPIRE).fieldKey("friends.0"))) + val lukeRecord = records.get(CacheKey(TEST_FIELD_KEY_EMPIRE).append("friends", "0")) assertEquals(lukeRecord!!["name"], "Han Solo") assertEquals(lukeRecord["height({\"unit\":\"FOOT\"})"], 5.905512) } diff --git a/tests/normalized-cache/src/commonTest/kotlin/OtherCacheTest.kt b/tests/normalized-cache/src/commonTest/kotlin/OtherCacheTest.kt index 8399d1bc..d18ddb6c 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/OtherCacheTest.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/OtherCacheTest.kt @@ -73,7 +73,7 @@ class OtherCacheTest { // Some details are not present in the master query, we should get a cache miss val e = apolloClient.query(CharacterDetailsQuery("1002")).fetchPolicy(FetchPolicy.CacheOnly).execute().exception as CacheMissException - assertTrue(e.message!!.contains("Object '${CacheKey("Character:1002").key}' has no field named '__typename'")) + assertTrue(e.message!!.contains("Object '${CacheKey("Character:1002").keyToString()}' has no field named '__typename'")) } @@ -83,7 +83,7 @@ class OtherCacheTest { .fetchPolicy(FetchPolicy.CacheOnly) .execute() .exception!! 
- assertTrue(e.message!!.contains("Object 'QUERY_ROOT' has no field named 'hero")) + assertTrue(e.message!!.contains("Object '${CacheKey("QUERY_ROOT").keyToString()}' has no field named 'hero")) } @Test diff --git a/tests/normalized-cache/src/commonTest/kotlin/fragmentnormalizer/FragmentNormalizerTest.kt b/tests/normalized-cache/src/commonTest/kotlin/fragmentnormalizer/FragmentNormalizerTest.kt index 9f7e9494..31e02466 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/fragmentnormalizer/FragmentNormalizerTest.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/fragmentnormalizer/FragmentNormalizerTest.kt @@ -5,6 +5,7 @@ import com.apollographql.apollo.api.CustomScalarAdapters import com.apollographql.apollo.testing.internal.runTest import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.IdCacheKeyGenerator +import com.apollographql.cache.normalized.api.append import com.apollographql.cache.normalized.apolloStore import com.apollographql.cache.normalized.internal.normalized import com.apollographql.cache.normalized.memory.MemoryCacheFactory @@ -95,10 +96,10 @@ class FragmentNormalizerTest { val records = fragment.normalized( ConversationFragmentImpl(), - rootKey = "1", + rootKey = CacheKey("1"), cacheKeyGenerator = IdCacheKeyGenerator(), ) - assertContains(records.keys, CacheKey("1.author")) + assertContains(records.keys, CacheKey("1").append("author")) } } diff --git a/tests/normalized-cache/src/jvmTest/kotlin/CacheMissLoggingInterceptorTest.kt b/tests/normalized-cache/src/jvmTest/kotlin/CacheMissLoggingInterceptorTest.kt index 2455b189..3970edd5 100644 --- a/tests/normalized-cache/src/jvmTest/kotlin/CacheMissLoggingInterceptorTest.kt +++ b/tests/normalized-cache/src/jvmTest/kotlin/CacheMissLoggingInterceptorTest.kt @@ -54,8 +54,8 @@ class CacheMissLoggingInterceptorTest { assertEquals( listOf( - "Object 'QUERY_ROOT' has no field named 'hero'", - "Object '${CacheKey("hero").key}' has no field named 'appearsIn'" + "Object '${CacheKey("QUERY_ROOT").keyToString()}' has no field named 'hero'", + "Object '${CacheKey("hero").keyToString()}' has no field named 'appearsIn'" ), recordedLogs ) diff --git a/tests/partial-results/src/commonTest/kotlin/test/CachePartialResultTest.kt b/tests/partial-results/src/commonTest/kotlin/test/CachePartialResultTest.kt index c6a25e1c..a0ac292e 100644 --- a/tests/partial-results/src/commonTest/kotlin/test/CachePartialResultTest.kt +++ b/tests/partial-results/src/commonTest/kotlin/test/CachePartialResultTest.kt @@ -19,6 +19,7 @@ import com.apollographql.cache.normalized.api.IdCacheKeyGenerator import com.apollographql.cache.normalized.api.IdCacheKeyResolver import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.api.SchemaCoordinatesMaxAgeProvider +import com.apollographql.cache.normalized.api.append import com.apollographql.cache.normalized.apolloStore import com.apollographql.cache.normalized.fetchFromCache import com.apollographql.cache.normalized.fetchPolicy @@ -124,7 +125,7 @@ class CachePartialResultTest { ) assertErrorsEquals( listOf( - Error.Builder("Object '${CacheKey("User:1").key}' has no field named 'nickName' in the cache") + Error.Builder("Object '${CacheKey("User:1").keyToString()}' has no field named 'nickName' in the cache") .path(listOf("me", "nickName")) .build() ), @@ -344,7 +345,8 @@ class CachePartialResultTest { ) assertErrorsEquals( listOf( - Error.Builder("Object '${CacheKey("User:3").key}' not found in the cache").path(listOf("me", "projects", 0, 
"lead")) + Error.Builder("Object '${CacheKey("User:3").keyToString()}' not found in the cache") + .path(listOf("me", "projects", 0, "lead")) .build() ), cacheResult.errors @@ -382,8 +384,10 @@ class CachePartialResultTest { ) assertErrorsEquals( listOf( - Error.Builder("Object '${CacheKey("User:2").key}' not found in the cache").path(listOf("me", "bestFriend")).build(), - Error.Builder("Object '${CacheKey("User:3").key}' not found in the cache").path(listOf("me", "projects", 0, "lead")) + Error.Builder("Object '${CacheKey("User:2").keyToString()}' not found in the cache").path(listOf("me", "bestFriend")) + .build(), + Error.Builder("Object '${CacheKey("User:3").keyToString()}' not found in the cache") + .path(listOf("me", "projects", 0, "lead")) .build(), ), cacheResult2.errors @@ -398,10 +402,13 @@ class CachePartialResultTest { assertNull(cacheResult3.data) assertErrorsEquals( listOf( - Error.Builder("Object '${CacheKey("User:2").key}' not found in the cache").path(listOf("me", "bestFriend")).build(), - Error.Builder("Object '${CacheKey("User:3").key}' not found in the cache").path(listOf("me", "projects", 0, "lead")) + Error.Builder("Object '${CacheKey("User:2").keyToString()}' not found in the cache").path(listOf("me", "bestFriend")) + .build(), + Error.Builder("Object '${CacheKey("User:3").keyToString()}' not found in the cache") + .path(listOf("me", "projects", 0, "lead")) .build(), - Error.Builder("Object '${CacheKey("User:4").key}' not found in the cache").path(listOf("me", "projects", 0, "users", 0)) + Error.Builder("Object '${CacheKey("User:4").keyToString()}' not found in the cache") + .path(listOf("me", "projects", 0, "users", 0)) .build() ), cacheResult3.errors @@ -546,7 +553,7 @@ class CachePartialResultTest { assertNull(cacheMissResult.data) assertErrorsEquals( listOf( - Error.Builder("Object '${CacheKey("User:1").key}' has no field named 'category' in the cache") + Error.Builder("Object '${CacheKey("User:1").keyToString()}' has no field named 'category' in the cache") .path(listOf("user", "category")) .build() ), @@ -675,7 +682,8 @@ class CachePartialResultTest { ) assertErrorsEquals( listOf( - Error.Builder("Object '${CacheKey("User:2").key}' not found in the cache").path(listOf("me", "mainProject", "lead0")) + Error.Builder("Object '${CacheKey("User:2").keyToString()}' not found in the cache") + .path(listOf("me", "mainProject", "lead0")) .build() ), cacheMissResult.errors @@ -740,7 +748,7 @@ class CachePartialResultTest { ) assertErrorsEquals( listOf( - Error.Builder("Field 'nickName' on object '${CacheKey("User:1").key}' is stale in the cache") + Error.Builder("Field 'nickName' on object '${CacheKey("User:1").keyToString()}' is stale in the cache") .path(listOf("me", "nickName")) .build() ), @@ -806,7 +814,10 @@ class CachePartialResultTest { ) assertErrorsEquals( listOf( - Error.Builder("Field 'salary' on object '${CacheKey("${CacheKey("User:1").key}.employeeInfo").key}' is stale in the cache") + Error.Builder("Field 'salary' on object '${ + CacheKey("User:1").append("employeeInfo").keyToString() + }' is stale in the cache" + ) .path(listOf("me", "employeeInfo", "salary")).build() ), cacheMissResult.errors From 4f6c58d6b555aef1884d71eef8d6b4fd036b9b39 Mon Sep 17 00:00:00 2001 From: BoD Date: Mon, 24 Mar 2025 19:37:17 +0100 Subject: [PATCH 11/29] Revert 1 field per row, and hashes --- .../api/normalized-cache-incubating.api | 96 +++--- .../api/normalized-cache-incubating.klib.api | 6 +- .../cache/normalized/api/CacheKey.kt | 30 +- 
.../normalized/internal/RecordWeigher.kt | 4 +- .../cache/normalized/memory/MemoryCache.kt | 2 +- .../normalized-cache-sqlite-incubating.api | 84 +++-- .../normalized-cache-sqlite-incubating.api | 84 +++-- ...ormalized-cache-sqlite-incubating.klib.api | 115 ++++--- .../build.gradle.kts | 8 +- .../sqldelight/{fields => record}/schema/1.db | Bin 8192 -> 8192 bytes .../sqldelight/{fields => record}/schema/2.db | Bin 8192 -> 8192 bytes .../normalized/sql/SqlNormalizedCache.kt | 60 ++-- .../internal/ApolloJsonElementSerializer.kt | 244 ------------- .../normalized/sql/internal/RecordDatabase.kt | 106 +----- .../sql/internal/RecordSerializer.kt | 321 ++++++++++++++++++ .../normalized/sql/internal/factoryHelpers.kt | 6 +- .../normalized/sql/internal/fields/fields.sq | 41 --- .../sqldelight/fields/com/migrations/1.sqm | 13 - .../normalized/sql/internal/record/record.sq | 31 ++ .../sqldelight/record/com/migrations/1.sqm | 10 + 20 files changed, 634 insertions(+), 627 deletions(-) rename normalized-cache-sqlite-incubating/sqldelight/{fields => record}/schema/1.db (96%) rename normalized-cache-sqlite-incubating/sqldelight/{fields => record}/schema/2.db (96%) delete mode 100644 normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/ApolloJsonElementSerializer.kt create mode 100644 normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordSerializer.kt delete mode 100644 normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/apollographql/cache/normalized/sql/internal/fields/fields.sq delete mode 100644 normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/migrations/1.sqm create mode 100644 normalized-cache-sqlite-incubating/src/commonMain/sqldelight/record/com/apollographql/cache/normalized/sql/internal/record/record.sq create mode 100644 normalized-cache-sqlite-incubating/src/commonMain/sqldelight/record/com/migrations/1.sqm diff --git a/normalized-cache-incubating/api/normalized-cache-incubating.api b/normalized-cache-incubating/api/normalized-cache-incubating.api index b2a2866d..2b823024 100644 --- a/normalized-cache-incubating/api/normalized-cache-incubating.api +++ b/normalized-cache-incubating/api/normalized-cache-incubating.api @@ -5,18 +5,18 @@ public abstract interface class com/apollographql/cache/normalized/ApolloStore { public abstract fun dispose ()V public abstract fun dump ()Ljava/util/Map; public abstract fun getChangedKeys ()Lkotlinx/coroutines/flow/SharedFlow; - public abstract fun normalize-niOPdRo (Lcom/apollographql/apollo/api/Executable;Ljava/util/Map;Lokio/ByteString;Lcom/apollographql/apollo/api/CustomScalarAdapters;)Ljava/util/Map; + public abstract fun normalize-niOPdRo (Lcom/apollographql/apollo/api/Executable;Ljava/util/Map;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;)Ljava/util/Map; public abstract fun publish (Ljava/util/Set;Lkotlin/coroutines/Continuation;)Ljava/lang/Object; - public abstract fun readFragment-dEpVOtE (Lcom/apollographql/apollo/api/Fragment;Lokio/ByteString;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/ApolloStore$ReadResult; + public abstract fun readFragment-dEpVOtE (Lcom/apollographql/apollo/api/Fragment;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/ApolloStore$ReadResult; public 
abstract fun readOperation (Lcom/apollographql/apollo/api/Operation;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/apollo/api/ApolloResponse; public abstract fun remove (Ljava/util/List;Z)I - public abstract fun remove-eNSUWrY (Lokio/ByteString;Z)Z + public abstract fun remove-eNSUWrY (Ljava/lang/String;Z)Z public abstract fun rollbackOptimisticUpdates (Ljava/util/UUID;)Ljava/util/Set; - public abstract fun writeFragment-1qdIjGk (Lcom/apollographql/apollo/api/Fragment;Lokio/ByteString;Lcom/apollographql/apollo/api/Fragment$Data;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Set; + public abstract fun writeFragment-1qdIjGk (Lcom/apollographql/apollo/api/Fragment;Ljava/lang/String;Lcom/apollographql/apollo/api/Fragment$Data;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Set; public abstract fun writeOperation (Lcom/apollographql/apollo/api/Operation;Lcom/apollographql/apollo/api/Operation$Data;Ljava/util/List;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Set; public abstract fun writeOperation (Lcom/apollographql/apollo/api/Operation;Ljava/util/Map;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Set; public abstract fun writeOptimisticUpdates (Lcom/apollographql/apollo/api/Operation;Lcom/apollographql/apollo/api/Operation$Data;Ljava/util/UUID;Lcom/apollographql/apollo/api/CustomScalarAdapters;)Ljava/util/Set; - public abstract fun writeOptimisticUpdates-1qdIjGk (Lcom/apollographql/apollo/api/Fragment;Lokio/ByteString;Lcom/apollographql/apollo/api/Fragment$Data;Ljava/util/UUID;Lcom/apollographql/apollo/api/CustomScalarAdapters;)Ljava/util/Set; + public abstract fun writeOptimisticUpdates-1qdIjGk (Lcom/apollographql/apollo/api/Fragment;Ljava/lang/String;Lcom/apollographql/apollo/api/Fragment$Data;Ljava/util/UUID;Lcom/apollographql/apollo/api/CustomScalarAdapters;)Ljava/util/Set; } public final class com/apollographql/cache/normalized/ApolloStore$Companion { @@ -24,16 +24,16 @@ public final class com/apollographql/cache/normalized/ApolloStore$Companion { } public final class com/apollographql/cache/normalized/ApolloStore$DefaultImpls { - public static synthetic fun normalize-niOPdRo$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Executable;Ljava/util/Map;Lokio/ByteString;Lcom/apollographql/apollo/api/CustomScalarAdapters;ILjava/lang/Object;)Ljava/util/Map; - public static synthetic fun readFragment-dEpVOtE$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Fragment;Lokio/ByteString;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Lcom/apollographql/cache/normalized/ApolloStore$ReadResult; + public static synthetic fun normalize-niOPdRo$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Executable;Ljava/util/Map;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;ILjava/lang/Object;)Ljava/util/Map; + public static synthetic fun readFragment-dEpVOtE$default 
(Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Fragment;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Lcom/apollographql/cache/normalized/ApolloStore$ReadResult; public static synthetic fun readOperation$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Operation;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Lcom/apollographql/apollo/api/ApolloResponse; public static synthetic fun remove$default (Lcom/apollographql/cache/normalized/ApolloStore;Ljava/util/List;ZILjava/lang/Object;)I - public static synthetic fun remove-eNSUWrY$default (Lcom/apollographql/cache/normalized/ApolloStore;Lokio/ByteString;ZILjava/lang/Object;)Z - public static synthetic fun writeFragment-1qdIjGk$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Fragment;Lokio/ByteString;Lcom/apollographql/apollo/api/Fragment$Data;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Ljava/util/Set; + public static synthetic fun remove-eNSUWrY$default (Lcom/apollographql/cache/normalized/ApolloStore;Ljava/lang/String;ZILjava/lang/Object;)Z + public static synthetic fun writeFragment-1qdIjGk$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Fragment;Ljava/lang/String;Lcom/apollographql/apollo/api/Fragment$Data;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Ljava/util/Set; public static synthetic fun writeOperation$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Operation;Lcom/apollographql/apollo/api/Operation$Data;Ljava/util/List;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Ljava/util/Set; public static synthetic fun writeOperation$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Operation;Ljava/util/Map;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Ljava/util/Set; public static synthetic fun writeOptimisticUpdates$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Operation;Lcom/apollographql/apollo/api/Operation$Data;Ljava/util/UUID;Lcom/apollographql/apollo/api/CustomScalarAdapters;ILjava/lang/Object;)Ljava/util/Set; - public static synthetic fun writeOptimisticUpdates-1qdIjGk$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Fragment;Lokio/ByteString;Lcom/apollographql/apollo/api/Fragment$Data;Ljava/util/UUID;Lcom/apollographql/apollo/api/CustomScalarAdapters;ILjava/lang/Object;)Ljava/util/Set; + public static synthetic fun writeOptimisticUpdates-1qdIjGk$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Fragment;Ljava/lang/String;Lcom/apollographql/apollo/api/Fragment$Data;Ljava/util/UUID;Lcom/apollographql/apollo/api/CustomScalarAdapters;ILjava/lang/Object;)Ljava/util/Set; } public final class com/apollographql/cache/normalized/ApolloStore$ReadResult { @@ -227,36 +227,34 @@ public final class com/apollographql/cache/normalized/api/CacheHeaders$Companion public final class com/apollographql/cache/normalized/api/CacheKey { public 
static final field Companion Lcom/apollographql/cache/normalized/api/CacheKey$Companion; - public static final field HASH_SIZE_BYTES I - public static final synthetic fun box-impl (Lokio/ByteString;)Lcom/apollographql/cache/normalized/api/CacheKey; + public static final synthetic fun box-impl (Ljava/lang/String;)Lcom/apollographql/cache/normalized/api/CacheKey; public static final fun canDeserialize (Ljava/lang/String;)Z - public static fun constructor-impl (Ljava/lang/String;)Lokio/ByteString; - public static fun constructor-impl (Ljava/lang/String;Ljava/util/List;)Lokio/ByteString; - public static fun constructor-impl (Ljava/lang/String;[Ljava/lang/String;)Lokio/ByteString; - public static fun constructor-impl (Lokio/ByteString;)Lokio/ByteString; - public static final fun deserialize-gE2UBb4 (Ljava/lang/String;)Lokio/ByteString; + public static fun constructor-impl (Ljava/lang/String;)Ljava/lang/String; + public static fun constructor-impl (Ljava/lang/String;Ljava/util/List;)Ljava/lang/String; + public static fun constructor-impl (Ljava/lang/String;[Ljava/lang/String;)Ljava/lang/String; + public static final fun deserialize-gE2UBb4 (Ljava/lang/String;)Ljava/lang/String; public fun equals (Ljava/lang/Object;)Z - public static fun equals-impl (Lokio/ByteString;Ljava/lang/Object;)Z - public static final fun equals-impl0 (Lokio/ByteString;Lokio/ByteString;)Z - public final fun getKey ()Lokio/ByteString; + public static fun equals-impl (Ljava/lang/String;Ljava/lang/Object;)Z + public static final fun equals-impl0 (Ljava/lang/String;Ljava/lang/String;)Z + public final fun getKey ()Ljava/lang/String; public fun hashCode ()I - public static fun hashCode-impl (Lokio/ByteString;)I - public static final fun keyToString-impl (Lokio/ByteString;)Ljava/lang/String; - public static final fun rootKey-mqw0cJ0 ()Lokio/ByteString; - public static final fun serialize-impl (Lokio/ByteString;)Ljava/lang/String; + public static fun hashCode-impl (Ljava/lang/String;)I + public static final fun keyToString-impl (Ljava/lang/String;)Ljava/lang/String; + public static final fun rootKey-mqw0cJ0 ()Ljava/lang/String; + public static final fun serialize-impl (Ljava/lang/String;)Ljava/lang/String; public fun toString ()Ljava/lang/String; - public static fun toString-impl (Lokio/ByteString;)Ljava/lang/String; - public final synthetic fun unbox-impl ()Lokio/ByteString; + public static fun toString-impl (Ljava/lang/String;)Ljava/lang/String; + public final synthetic fun unbox-impl ()Ljava/lang/String; } public final class com/apollographql/cache/normalized/api/CacheKey$Companion { public final fun canDeserialize (Ljava/lang/String;)Z - public final fun deserialize-gE2UBb4 (Ljava/lang/String;)Lokio/ByteString; - public final fun rootKey-mqw0cJ0 ()Lokio/ByteString; + public final fun deserialize-gE2UBb4 (Ljava/lang/String;)Ljava/lang/String; + public final fun rootKey-mqw0cJ0 ()Ljava/lang/String; } public abstract interface class com/apollographql/cache/normalized/api/CacheKeyGenerator { - public abstract fun cacheKeyForObject-z2_y8R0 (Ljava/util/Map;Lcom/apollographql/cache/normalized/api/CacheKeyGeneratorContext;)Lokio/ByteString; + public abstract fun cacheKeyForObject-z2_y8R0 (Ljava/util/Map;Lcom/apollographql/cache/normalized/api/CacheKeyGeneratorContext;)Ljava/lang/String; } public final class com/apollographql/cache/normalized/api/CacheKeyGeneratorContext { @@ -266,14 +264,14 @@ public final class com/apollographql/cache/normalized/api/CacheKeyGeneratorConte } public final class 
com/apollographql/cache/normalized/api/CacheKeyKt { - public static final fun append-eNSUWrY (Lokio/ByteString;[Ljava/lang/String;)Lokio/ByteString; - public static final fun fieldKey-eNSUWrY (Lokio/ByteString;Ljava/lang/String;)Ljava/lang/String; - public static final fun isRootKey-pWl1Des (Lokio/ByteString;)Z + public static final fun append-eNSUWrY (Ljava/lang/String;[Ljava/lang/String;)Ljava/lang/String; + public static final fun fieldKey-eNSUWrY (Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String; + public static final fun isRootKey-pWl1Des (Ljava/lang/String;)Z } public abstract class com/apollographql/cache/normalized/api/CacheKeyResolver : com/apollographql/cache/normalized/api/CacheResolver { public fun ()V - public abstract fun cacheKeyForField-fLoEQYY (Lcom/apollographql/cache/normalized/api/ResolverContext;)Lokio/ByteString; + public abstract fun cacheKeyForField-fLoEQYY (Lcom/apollographql/cache/normalized/api/ResolverContext;)Ljava/lang/String; public fun listOfCacheKeysForField (Lcom/apollographql/cache/normalized/api/ResolverContext;)Ljava/util/List; public final fun resolveField (Lcom/apollographql/cache/normalized/api/ResolverContext;)Ljava/lang/Object; } @@ -401,14 +399,14 @@ public final class com/apollographql/cache/normalized/api/IdCacheKeyGenerator : public fun ()V public fun ([Ljava/lang/String;)V public synthetic fun ([Ljava/lang/String;ILkotlin/jvm/internal/DefaultConstructorMarker;)V - public fun cacheKeyForObject-z2_y8R0 (Ljava/util/Map;Lcom/apollographql/cache/normalized/api/CacheKeyGeneratorContext;)Lokio/ByteString; + public fun cacheKeyForObject-z2_y8R0 (Ljava/util/Map;Lcom/apollographql/cache/normalized/api/CacheKeyGeneratorContext;)Ljava/lang/String; } public final class com/apollographql/cache/normalized/api/IdCacheKeyResolver : com/apollographql/cache/normalized/api/CacheKeyResolver { public fun ()V public fun (Ljava/util/List;Ljava/util/List;)V public synthetic fun (Ljava/util/List;Ljava/util/List;ILkotlin/jvm/internal/DefaultConstructorMarker;)V - public fun cacheKeyForField-fLoEQYY (Lcom/apollographql/cache/normalized/api/ResolverContext;)Lokio/ByteString; + public fun cacheKeyForField-fLoEQYY (Lcom/apollographql/cache/normalized/api/ResolverContext;)Ljava/lang/String; public fun listOfCacheKeysForField (Lcom/apollographql/cache/normalized/api/ResolverContext;)Ljava/util/List; } @@ -462,7 +460,7 @@ public abstract interface class com/apollographql/cache/normalized/api/Normalize public abstract fun merge (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; public static fun prettifyDump (Ljava/util/Map;)Ljava/lang/String; public abstract fun remove (Ljava/util/Collection;Z)I - public abstract fun remove-eNSUWrY (Lokio/ByteString;Z)Z + public abstract fun remove-eNSUWrY (Ljava/lang/String;Z)Z public abstract fun trim (JF)J } @@ -482,14 +480,14 @@ public abstract class com/apollographql/cache/normalized/api/NormalizedCacheFact public abstract interface class com/apollographql/cache/normalized/api/ReadOnlyNormalizedCache { public abstract fun dump ()Ljava/util/Map; - public abstract fun loadRecord-eNSUWrY (Lokio/ByteString;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; + public abstract fun loadRecord-eNSUWrY (Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; public abstract fun loadRecords 
(Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Collection; } public final class com/apollographql/cache/normalized/api/Record : java/util/Map, kotlin/jvm/internal/markers/KMappedMarker { public static final field Companion Lcom/apollographql/cache/normalized/api/Record$Companion; - public synthetic fun (Lokio/ByteString;Ljava/util/Map;Ljava/util/UUID;Ljava/util/Map;ILkotlin/jvm/internal/DefaultConstructorMarker;)V - public synthetic fun (Lokio/ByteString;Ljava/util/Map;Ljava/util/UUID;Ljava/util/Map;Lkotlin/jvm/internal/DefaultConstructorMarker;)V + public synthetic fun (Ljava/lang/String;Ljava/util/Map;Ljava/util/UUID;Ljava/util/Map;ILkotlin/jvm/internal/DefaultConstructorMarker;)V + public synthetic fun (Ljava/lang/String;Ljava/util/Map;Ljava/util/UUID;Ljava/util/Map;Lkotlin/jvm/internal/DefaultConstructorMarker;)V public fun clear ()V public synthetic fun compute (Ljava/lang/Object;Ljava/util/function/BiFunction;)Ljava/lang/Object; public fun compute (Ljava/lang/String;Ljava/util/function/BiFunction;)Ljava/lang/Object; @@ -506,7 +504,7 @@ public final class com/apollographql/cache/normalized/api/Record : java/util/Map public fun get (Ljava/lang/String;)Ljava/lang/Object; public fun getEntries ()Ljava/util/Set; public final fun getFields ()Ljava/util/Map; - public final fun getKey-mqw0cJ0 ()Lokio/ByteString; + public final fun getKey-mqw0cJ0 ()Ljava/lang/String; public fun getKeys ()Ljava/util/Set; public final fun getMetadata ()Ljava/util/Map; public final fun getMutationId ()Ljava/util/UUID; @@ -561,12 +559,12 @@ public final class com/apollographql/cache/normalized/api/RecordMergerKt { } public final class com/apollographql/cache/normalized/api/ResolverContext { - public synthetic fun (Lcom/apollographql/apollo/api/CompiledField;Lcom/apollographql/apollo/api/Executable$Variables;Ljava/util/Map;Lokio/ByteString;Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Ljava/util/List;Lkotlin/jvm/internal/DefaultConstructorMarker;)V + public synthetic fun (Lcom/apollographql/apollo/api/CompiledField;Lcom/apollographql/apollo/api/Executable$Variables;Ljava/util/Map;Ljava/lang/String;Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Ljava/util/List;Lkotlin/jvm/internal/DefaultConstructorMarker;)V public final fun getCacheHeaders ()Lcom/apollographql/cache/normalized/api/CacheHeaders; public final fun getField ()Lcom/apollographql/apollo/api/CompiledField; public final fun getFieldKeyGenerator ()Lcom/apollographql/cache/normalized/api/FieldKeyGenerator; public final fun getParent ()Ljava/util/Map; - public final fun getParentKey-mqw0cJ0 ()Lokio/ByteString; + public final fun getParentKey-mqw0cJ0 ()Ljava/lang/String; public final fun getParentType ()Ljava/lang/String; public final fun getPath ()Ljava/util/List; public final fun getVariables ()Lcom/apollographql/apollo/api/Executable$Variables; @@ -579,14 +577,14 @@ public final class com/apollographql/cache/normalized/api/SchemaCoordinatesMaxAg public final class com/apollographql/cache/normalized/api/TypePolicyCacheKeyGenerator : com/apollographql/cache/normalized/api/CacheKeyGenerator { public static final field INSTANCE Lcom/apollographql/cache/normalized/api/TypePolicyCacheKeyGenerator; - public fun cacheKeyForObject-z2_y8R0 (Ljava/util/Map;Lcom/apollographql/cache/normalized/api/CacheKeyGeneratorContext;)Lokio/ByteString; + public fun 
cacheKeyForObject-z2_y8R0 (Ljava/util/Map;Lcom/apollographql/cache/normalized/api/CacheKeyGeneratorContext;)Ljava/lang/String; } public final class com/apollographql/cache/normalized/internal/NormalizerKt { - public static final fun normalized-MplSeLY (Lcom/apollographql/apollo/api/Executable$Data;Lcom/apollographql/apollo/api/Executable;Lokio/ByteString;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheKeyGenerator;Lcom/apollographql/cache/normalized/api/MetadataGenerator;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Lcom/apollographql/cache/normalized/api/EmbeddedFieldsProvider;)Ljava/util/Map; - public static final fun normalized-MplSeLY (Ljava/util/Map;Lcom/apollographql/apollo/api/Executable;Lokio/ByteString;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheKeyGenerator;Lcom/apollographql/cache/normalized/api/MetadataGenerator;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Lcom/apollographql/cache/normalized/api/EmbeddedFieldsProvider;)Ljava/util/Map; - public static synthetic fun normalized-MplSeLY$default (Lcom/apollographql/apollo/api/Executable$Data;Lcom/apollographql/apollo/api/Executable;Lokio/ByteString;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheKeyGenerator;Lcom/apollographql/cache/normalized/api/MetadataGenerator;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Lcom/apollographql/cache/normalized/api/EmbeddedFieldsProvider;ILjava/lang/Object;)Ljava/util/Map; - public static synthetic fun normalized-MplSeLY$default (Ljava/util/Map;Lcom/apollographql/apollo/api/Executable;Lokio/ByteString;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheKeyGenerator;Lcom/apollographql/cache/normalized/api/MetadataGenerator;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Lcom/apollographql/cache/normalized/api/EmbeddedFieldsProvider;ILjava/lang/Object;)Ljava/util/Map; + public static final fun normalized-MplSeLY (Lcom/apollographql/apollo/api/Executable$Data;Lcom/apollographql/apollo/api/Executable;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheKeyGenerator;Lcom/apollographql/cache/normalized/api/MetadataGenerator;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Lcom/apollographql/cache/normalized/api/EmbeddedFieldsProvider;)Ljava/util/Map; + public static final fun normalized-MplSeLY (Ljava/util/Map;Lcom/apollographql/apollo/api/Executable;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheKeyGenerator;Lcom/apollographql/cache/normalized/api/MetadataGenerator;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Lcom/apollographql/cache/normalized/api/EmbeddedFieldsProvider;)Ljava/util/Map; + public static synthetic fun normalized-MplSeLY$default (Lcom/apollographql/apollo/api/Executable$Data;Lcom/apollographql/apollo/api/Executable;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheKeyGenerator;Lcom/apollographql/cache/normalized/api/MetadataGenerator;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Lcom/apollographql/cache/normalized/api/EmbeddedFieldsProvider;ILjava/lang/Object;)Ljava/util/Map; + public static synthetic fun normalized-MplSeLY$default 
(Ljava/util/Map;Lcom/apollographql/apollo/api/Executable;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheKeyGenerator;Lcom/apollographql/cache/normalized/api/MetadataGenerator;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Lcom/apollographql/cache/normalized/api/EmbeddedFieldsProvider;ILjava/lang/Object;)Ljava/util/Map; } public final class com/apollographql/cache/normalized/memory/MemoryCache : com/apollographql/cache/normalized/api/NormalizedCache { @@ -596,12 +594,12 @@ public final class com/apollographql/cache/normalized/memory/MemoryCache : com/a public fun clearAll ()V public fun dump ()Ljava/util/Map; public final fun getSize ()I - public fun loadRecord-eNSUWrY (Lokio/ByteString;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; + public fun loadRecord-eNSUWrY (Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; public fun loadRecords (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Collection; public fun merge (Lcom/apollographql/cache/normalized/api/Record;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; public fun merge (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; public fun remove (Ljava/util/Collection;Z)I - public fun remove-eNSUWrY (Lokio/ByteString;Z)Z + public fun remove-eNSUWrY (Ljava/lang/String;Z)Z public fun trim (JF)J } diff --git a/normalized-cache-incubating/api/normalized-cache-incubating.klib.api b/normalized-cache-incubating/api/normalized-cache-incubating.klib.api index 2265b2a8..11f6118e 100644 --- a/normalized-cache-incubating/api/normalized-cache-incubating.klib.api +++ b/normalized-cache-incubating/api/normalized-cache-incubating.klib.api @@ -464,10 +464,9 @@ final value class com.apollographql.cache.normalized.api/CacheKey { // com.apoll constructor (kotlin/String) // com.apollographql.cache.normalized.api/CacheKey.|(kotlin.String){}[0] constructor (kotlin/String, kotlin.collections/List) // com.apollographql.cache.normalized.api/CacheKey.|(kotlin.String;kotlin.collections.List){}[0] constructor (kotlin/String, kotlin/Array...) 
// com.apollographql.cache.normalized.api/CacheKey.|(kotlin.String;kotlin.Array...){}[0] - constructor (okio/ByteString) // com.apollographql.cache.normalized.api/CacheKey.|(okio.ByteString){}[0] final val key // com.apollographql.cache.normalized.api/CacheKey.key|{}key[0] - final fun (): okio/ByteString // com.apollographql.cache.normalized.api/CacheKey.key.|(){}[0] + final fun (): kotlin/String // com.apollographql.cache.normalized.api/CacheKey.key.|(){}[0] final fun equals(kotlin/Any?): kotlin/Boolean // com.apollographql.cache.normalized.api/CacheKey.equals|equals(kotlin.Any?){}[0] final fun hashCode(): kotlin/Int // com.apollographql.cache.normalized.api/CacheKey.hashCode|hashCode(){}[0] @@ -476,9 +475,6 @@ final value class com.apollographql.cache.normalized.api/CacheKey { // com.apoll final fun toString(): kotlin/String // com.apollographql.cache.normalized.api/CacheKey.toString|toString(){}[0] final object Companion { // com.apollographql.cache.normalized.api/CacheKey.Companion|null[0] - final const val HASH_SIZE_BYTES // com.apollographql.cache.normalized.api/CacheKey.Companion.HASH_SIZE_BYTES|{}HASH_SIZE_BYTES[0] - final fun (): kotlin/Int // com.apollographql.cache.normalized.api/CacheKey.Companion.HASH_SIZE_BYTES.|(){}[0] - final fun canDeserialize(kotlin/String): kotlin/Boolean // com.apollographql.cache.normalized.api/CacheKey.Companion.canDeserialize|canDeserialize(kotlin.String){}[0] final fun deserialize(kotlin/String): com.apollographql.cache.normalized.api/CacheKey // com.apollographql.cache.normalized.api/CacheKey.Companion.deserialize|deserialize(kotlin.String){}[0] final fun rootKey(): com.apollographql.cache.normalized.api/CacheKey // com.apollographql.cache.normalized.api/CacheKey.Companion.rootKey|rootKey(){}[0] diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt index 91444b76..0243680f 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt @@ -1,10 +1,6 @@ package com.apollographql.cache.normalized.api import com.apollographql.apollo.annotations.ApolloInternal -import com.apollographql.cache.normalized.api.CacheKey.Companion.HASH_SIZE_BYTES -import okio.Buffer -import okio.ByteString -import okio.ByteString.Companion.encodeUtf8 import kotlin.jvm.JvmInline import kotlin.jvm.JvmStatic @@ -14,17 +10,10 @@ import kotlin.jvm.JvmStatic @JvmInline value class CacheKey( /** - * The hashed key of the object in the cache. + * The key of the object in the cache. */ - val key: ByteString, + val key: String, ) { - /** - * Builds a [CacheKey] from a key. - * - * @param key The key of the object in the cache. The key must be globally unique. - */ - constructor(key: String) : this(key.hashed()) - /** * Builds a [CacheKey] from a typename and a list of Strings. 
* @@ -48,7 +37,7 @@ value class CacheKey( constructor(typename: String, vararg values: String) : this(typename, values.toList()) fun keyToString(): String { - return key.hex() + return key } override fun toString() = "CacheKey(${keyToString()})" @@ -84,9 +73,6 @@ value class CacheKey( fun rootKey(): CacheKey { return ROOT_CACHE_KEY } - - @ApolloInternal - const val HASH_SIZE_BYTES = 10 } } @@ -99,19 +85,11 @@ fun CacheKey.fieldKey(fieldName: String): String { return "${keyToString()}.$fieldName" } -private fun String.hashed(): ByteString { - return encodeUtf8().hashed() -} - -private fun ByteString.hashed(): ByteString { - return sha256().substring(endIndex = HASH_SIZE_BYTES) -} - @ApolloInternal fun CacheKey.append(vararg keys: String): CacheKey { var cacheKey: CacheKey = this for (key in keys) { - cacheKey = CacheKey(Buffer().write(cacheKey.key).write(key.encodeUtf8()).readByteString().hashed()) + cacheKey = CacheKey("${cacheKey.key}.$key") } return cacheKey } diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/RecordWeigher.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/RecordWeigher.kt index b5fce288..b4f367b9 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/RecordWeigher.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/RecordWeigher.kt @@ -27,7 +27,7 @@ internal object RecordWeigher { @JvmStatic fun calculateBytes(record: Record): Int { - var size = SIZE_OF_RECORD_OVERHEAD + record.key.key.size + var size = SIZE_OF_RECORD_OVERHEAD + record.key.key.length for ((key, value) in record.fields) { size += key.length + weighField(value) } @@ -56,7 +56,7 @@ internal object RecordWeigher { } is CacheKey -> { - SIZE_OF_CACHE_KEY_OVERHEAD + field.key.size + SIZE_OF_CACHE_KEY_OVERHEAD + field.key.length } is Error -> { diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/memory/MemoryCache.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/memory/MemoryCache.kt index 7108df76..7bc43088 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/memory/MemoryCache.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/memory/MemoryCache.kt @@ -40,7 +40,7 @@ class MemoryCache( } private val lruCache = LruCache(maxSize = maxSizeBytes, expireAfterMillis = expireAfterMillis) { key, record -> - key.key.size + record.sizeInBytes + key.key.length + record.sizeInBytes } val size: Int diff --git a/normalized-cache-sqlite-incubating/api/android/normalized-cache-sqlite-incubating.api b/normalized-cache-sqlite-incubating/api/android/normalized-cache-sqlite-incubating.api index 25bc42b8..66bc8614 100644 --- a/normalized-cache-sqlite-incubating/api/android/normalized-cache-sqlite-incubating.api +++ b/normalized-cache-sqlite-incubating/api/android/normalized-cache-sqlite-incubating.api @@ -12,12 +12,12 @@ public final class com/apollographql/cache/normalized/sql/ApolloInitializer$Comp public final class com/apollographql/cache/normalized/sql/SqlNormalizedCache : com/apollographql/cache/normalized/api/NormalizedCache { public fun clearAll ()V public fun dump ()Ljava/util/Map; - public fun loadRecord-eNSUWrY (Lokio/ByteString;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; + public fun 
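[Editor note] The CacheKey.kt hunk above removes the hashing introduced in patch 01: keys are stored as plain strings again, append concatenates segments with '.', and RecordWeigher/MemoryCache now charge key.length instead of the size of a hashed ByteString. A small sketch of the two behaviours side by side; okio on the classpath is assumed, and the 10-byte truncated SHA-256 mirrors the removed HASH_SIZE_BYTES logic:

import okio.ByteString.Companion.encodeUtf8

// Old behaviour (removed above): keys were a 10-byte truncated SHA-256, rendered as hex by keyToString().
fun hashedKey(key: String): String =
    key.encodeUtf8().sha256().substring(endIndex = 10).hex()

// New behaviour: the key is kept verbatim, so dumps and cache-miss messages stay readable.
fun plainKey(key: String): String = key

fun main() {
  println(hashedKey("Character:2001")) // 20 opaque hex characters
  println(plainKey("Character:2001"))  // Character:2001
}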
loadRecord-eNSUWrY (Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; public fun loadRecords (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Collection; public fun merge (Lcom/apollographql/cache/normalized/api/Record;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; public fun merge (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; public fun remove (Ljava/util/Collection;Z)I - public fun remove-eNSUWrY (Lokio/ByteString;Z)Z + public fun remove-eNSUWrY (Ljava/lang/String;Z)Z public fun trim (JF)J } @@ -38,48 +38,68 @@ public final class com/apollographql/cache/normalized/sql/VersionKt { public static final field VERSION Ljava/lang/String; } -public final class com/apollographql/cache/normalized/sql/internal/fields/Field_ { - public fun ([BLjava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;)V - public final fun component1 ()[B - public final fun component2 ()Ljava/lang/String; - public final fun component3 ()[B - public final fun component4 ()[B - public final fun component5 ()Ljava/lang/Long; - public final fun component6 ()Ljava/lang/Long; - public final fun copy ([BLjava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;)Lcom/apollographql/cache/normalized/sql/internal/fields/Field_; - public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/fields/Field_;[BLjava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;ILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/fields/Field_; +public final class com/apollographql/cache/normalized/sql/internal/record/Record { + public fun (Ljava/lang/String;[BJ)V + public final fun component1 ()Ljava/lang/String; + public final fun component2 ()[B + public final fun component3 ()J + public final fun copy (Ljava/lang/String;[BJ)Lcom/apollographql/cache/normalized/sql/internal/record/Record; + public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/record/Record;Ljava/lang/String;[BJILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/record/Record; public fun equals (Ljava/lang/Object;)Z - public final fun getExpiration_date ()Ljava/lang/Long; - public final fun getField_ ()Ljava/lang/String; - public final fun getMetadata ()[B - public final fun getReceived_date ()Ljava/lang/Long; + public final fun getKey ()Ljava/lang/String; public final fun getRecord ()[B - public final fun getValue_ ()[B + public final fun getUpdate_date ()J public fun hashCode ()I public fun toString ()Ljava/lang/String; } -public abstract interface class com/apollographql/cache/normalized/sql/internal/fields/FieldsDatabase : app/cash/sqldelight/Transacter { - public static final field Companion Lcom/apollographql/cache/normalized/sql/internal/fields/FieldsDatabase$Companion; - public abstract fun getFieldsQueries ()Lcom/apollographql/cache/normalized/sql/internal/fields/FieldsQueries; -} - -public final class com/apollographql/cache/normalized/sql/internal/fields/FieldsDatabase$Companion { - public final fun getSchema ()Lapp/cash/sqldelight/db/SqlSchema; - public final fun invoke (Lapp/cash/sqldelight/db/SqlDriver;)Lcom/apollographql/cache/normalized/sql/internal/fields/FieldsDatabase; -} - -public final class com/apollographql/cache/normalized/sql/internal/fields/FieldsQueries : 
app/cash/sqldelight/TransacterImpl { +public final class com/apollographql/cache/normalized/sql/internal/record/RecordQueries : app/cash/sqldelight/TransacterImpl { public fun (Lapp/cash/sqldelight/db/SqlDriver;)V public final fun changes ()Lapp/cash/sqldelight/ExecutableQuery; public final fun count ()Lapp/cash/sqldelight/Query; public final fun deleteAllRecords ()V public final fun deleteRecords (Ljava/util/Collection;)V - public final fun insertOrUpdateField ([BLjava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;)V + public final fun insertOrUpdateRecord (Ljava/lang/String;[BJ)V public final fun selectAllRecords ()Lapp/cash/sqldelight/Query; - public final fun selectAllRecords (Lkotlin/jvm/functions/Function6;)Lapp/cash/sqldelight/Query; + public final fun selectAllRecords (Lkotlin/jvm/functions/Function2;)Lapp/cash/sqldelight/Query; public final fun selectRecords (Ljava/util/Collection;)Lapp/cash/sqldelight/Query; - public final fun selectRecords (Ljava/util/Collection;Lkotlin/jvm/functions/Function6;)Lapp/cash/sqldelight/Query; - public final fun trimByReceivedDate (J)V + public final fun selectRecords (Ljava/util/Collection;Lkotlin/jvm/functions/Function2;)Lapp/cash/sqldelight/Query; + public final fun trimByUpdateDate (J)V +} + +public final class com/apollographql/cache/normalized/sql/internal/record/SelectAllRecords { + public fun (Ljava/lang/String;[B)V + public final fun component1 ()Ljava/lang/String; + public final fun component2 ()[B + public final fun copy (Ljava/lang/String;[B)Lcom/apollographql/cache/normalized/sql/internal/record/SelectAllRecords; + public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/record/SelectAllRecords;Ljava/lang/String;[BILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/record/SelectAllRecords; + public fun equals (Ljava/lang/Object;)Z + public final fun getKey ()Ljava/lang/String; + public final fun getRecord ()[B + public fun hashCode ()I + public fun toString ()Ljava/lang/String; +} + +public final class com/apollographql/cache/normalized/sql/internal/record/SelectRecords { + public fun (Ljava/lang/String;[B)V + public final fun component1 ()Ljava/lang/String; + public final fun component2 ()[B + public final fun copy (Ljava/lang/String;[B)Lcom/apollographql/cache/normalized/sql/internal/record/SelectRecords; + public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/record/SelectRecords;Ljava/lang/String;[BILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/record/SelectRecords; + public fun equals (Ljava/lang/Object;)Z + public final fun getKey ()Ljava/lang/String; + public final fun getRecord ()[B + public fun hashCode ()I + public fun toString ()Ljava/lang/String; +} + +public abstract interface class com/apollographql/cache/normalized/sql/internal/record/SqlRecordDatabase : app/cash/sqldelight/Transacter { + public static final field Companion Lcom/apollographql/cache/normalized/sql/internal/record/SqlRecordDatabase$Companion; + public abstract fun getRecordQueries ()Lcom/apollographql/cache/normalized/sql/internal/record/RecordQueries; +} + +public final class com/apollographql/cache/normalized/sql/internal/record/SqlRecordDatabase$Companion { + public final fun getSchema ()Lapp/cash/sqldelight/db/SqlSchema; + public final fun invoke (Lapp/cash/sqldelight/db/SqlDriver;)Lcom/apollographql/cache/normalized/sql/internal/record/SqlRecordDatabase; } diff --git 
a/normalized-cache-sqlite-incubating/api/jvm/normalized-cache-sqlite-incubating.api b/normalized-cache-sqlite-incubating/api/jvm/normalized-cache-sqlite-incubating.api index 69b0699f..885fb475 100644 --- a/normalized-cache-sqlite-incubating/api/jvm/normalized-cache-sqlite-incubating.api +++ b/normalized-cache-sqlite-incubating/api/jvm/normalized-cache-sqlite-incubating.api @@ -1,12 +1,12 @@ public final class com/apollographql/cache/normalized/sql/SqlNormalizedCache : com/apollographql/cache/normalized/api/NormalizedCache { public fun clearAll ()V public fun dump ()Ljava/util/Map; - public fun loadRecord-eNSUWrY (Lokio/ByteString;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; + public fun loadRecord-eNSUWrY (Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; public fun loadRecords (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Collection; public fun merge (Lcom/apollographql/cache/normalized/api/Record;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; public fun merge (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; public fun remove (Ljava/util/Collection;Z)I - public fun remove-eNSUWrY (Lokio/ByteString;Z)Z + public fun remove-eNSUWrY (Ljava/lang/String;Z)Z public fun trim (JF)J } @@ -23,48 +23,68 @@ public final class com/apollographql/cache/normalized/sql/VersionKt { public static final field VERSION Ljava/lang/String; } -public final class com/apollographql/cache/normalized/sql/internal/fields/Field_ { - public fun ([BLjava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;)V - public final fun component1 ()[B - public final fun component2 ()Ljava/lang/String; - public final fun component3 ()[B - public final fun component4 ()[B - public final fun component5 ()Ljava/lang/Long; - public final fun component6 ()Ljava/lang/Long; - public final fun copy ([BLjava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;)Lcom/apollographql/cache/normalized/sql/internal/fields/Field_; - public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/fields/Field_;[BLjava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;ILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/fields/Field_; +public final class com/apollographql/cache/normalized/sql/internal/record/Record { + public fun (Ljava/lang/String;[BJ)V + public final fun component1 ()Ljava/lang/String; + public final fun component2 ()[B + public final fun component3 ()J + public final fun copy (Ljava/lang/String;[BJ)Lcom/apollographql/cache/normalized/sql/internal/record/Record; + public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/record/Record;Ljava/lang/String;[BJILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/record/Record; public fun equals (Ljava/lang/Object;)Z - public final fun getExpiration_date ()Ljava/lang/Long; - public final fun getField_ ()Ljava/lang/String; - public final fun getMetadata ()[B - public final fun getReceived_date ()Ljava/lang/Long; + public final fun getKey ()Ljava/lang/String; public final fun getRecord ()[B - public final fun getValue_ ()[B + public final fun getUpdate_date ()J public fun hashCode ()I public fun toString ()Ljava/lang/String; } -public abstract interface class 
com/apollographql/cache/normalized/sql/internal/fields/FieldsDatabase : app/cash/sqldelight/Transacter { - public static final field Companion Lcom/apollographql/cache/normalized/sql/internal/fields/FieldsDatabase$Companion; - public abstract fun getFieldsQueries ()Lcom/apollographql/cache/normalized/sql/internal/fields/FieldsQueries; -} - -public final class com/apollographql/cache/normalized/sql/internal/fields/FieldsDatabase$Companion { - public final fun getSchema ()Lapp/cash/sqldelight/db/SqlSchema; - public final fun invoke (Lapp/cash/sqldelight/db/SqlDriver;)Lcom/apollographql/cache/normalized/sql/internal/fields/FieldsDatabase; -} - -public final class com/apollographql/cache/normalized/sql/internal/fields/FieldsQueries : app/cash/sqldelight/TransacterImpl { +public final class com/apollographql/cache/normalized/sql/internal/record/RecordQueries : app/cash/sqldelight/TransacterImpl { public fun (Lapp/cash/sqldelight/db/SqlDriver;)V public final fun changes ()Lapp/cash/sqldelight/ExecutableQuery; public final fun count ()Lapp/cash/sqldelight/Query; public final fun deleteAllRecords ()V public final fun deleteRecords (Ljava/util/Collection;)V - public final fun insertOrUpdateField ([BLjava/lang/String;[B[BLjava/lang/Long;Ljava/lang/Long;)V + public final fun insertOrUpdateRecord (Ljava/lang/String;[BJ)V public final fun selectAllRecords ()Lapp/cash/sqldelight/Query; - public final fun selectAllRecords (Lkotlin/jvm/functions/Function6;)Lapp/cash/sqldelight/Query; + public final fun selectAllRecords (Lkotlin/jvm/functions/Function2;)Lapp/cash/sqldelight/Query; public final fun selectRecords (Ljava/util/Collection;)Lapp/cash/sqldelight/Query; - public final fun selectRecords (Ljava/util/Collection;Lkotlin/jvm/functions/Function6;)Lapp/cash/sqldelight/Query; - public final fun trimByReceivedDate (J)V + public final fun selectRecords (Ljava/util/Collection;Lkotlin/jvm/functions/Function2;)Lapp/cash/sqldelight/Query; + public final fun trimByUpdateDate (J)V +} + +public final class com/apollographql/cache/normalized/sql/internal/record/SelectAllRecords { + public fun (Ljava/lang/String;[B)V + public final fun component1 ()Ljava/lang/String; + public final fun component2 ()[B + public final fun copy (Ljava/lang/String;[B)Lcom/apollographql/cache/normalized/sql/internal/record/SelectAllRecords; + public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/record/SelectAllRecords;Ljava/lang/String;[BILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/record/SelectAllRecords; + public fun equals (Ljava/lang/Object;)Z + public final fun getKey ()Ljava/lang/String; + public final fun getRecord ()[B + public fun hashCode ()I + public fun toString ()Ljava/lang/String; +} + +public final class com/apollographql/cache/normalized/sql/internal/record/SelectRecords { + public fun (Ljava/lang/String;[B)V + public final fun component1 ()Ljava/lang/String; + public final fun component2 ()[B + public final fun copy (Ljava/lang/String;[B)Lcom/apollographql/cache/normalized/sql/internal/record/SelectRecords; + public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/record/SelectRecords;Ljava/lang/String;[BILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/record/SelectRecords; + public fun equals (Ljava/lang/Object;)Z + public final fun getKey ()Ljava/lang/String; + public final fun getRecord ()[B + public fun hashCode ()I + public fun toString ()Ljava/lang/String; +} + +public abstract interface class 
com/apollographql/cache/normalized/sql/internal/record/SqlRecordDatabase : app/cash/sqldelight/Transacter { + public static final field Companion Lcom/apollographql/cache/normalized/sql/internal/record/SqlRecordDatabase$Companion; + public abstract fun getRecordQueries ()Lcom/apollographql/cache/normalized/sql/internal/record/RecordQueries; +} + +public final class com/apollographql/cache/normalized/sql/internal/record/SqlRecordDatabase$Companion { + public final fun getSchema ()Lapp/cash/sqldelight/db/SqlSchema; + public final fun invoke (Lapp/cash/sqldelight/db/SqlDriver;)Lcom/apollographql/cache/normalized/sql/internal/record/SqlRecordDatabase; } diff --git a/normalized-cache-sqlite-incubating/api/normalized-cache-sqlite-incubating.klib.api b/normalized-cache-sqlite-incubating/api/normalized-cache-sqlite-incubating.klib.api index 8b072d78..1889d7e1 100644 --- a/normalized-cache-sqlite-incubating/api/normalized-cache-sqlite-incubating.klib.api +++ b/normalized-cache-sqlite-incubating/api/normalized-cache-sqlite-incubating.klib.api @@ -6,59 +6,82 @@ // - Show declarations: true // Library unique name: -abstract interface com.apollographql.cache.normalized.sql.internal.fields/FieldsDatabase : app.cash.sqldelight/Transacter { // com.apollographql.cache.normalized.sql.internal.fields/FieldsDatabase|null[0] - abstract val fieldsQueries // com.apollographql.cache.normalized.sql.internal.fields/FieldsDatabase.fieldsQueries|{}fieldsQueries[0] - abstract fun (): com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries // com.apollographql.cache.normalized.sql.internal.fields/FieldsDatabase.fieldsQueries.|(){}[0] +abstract interface com.apollographql.cache.normalized.sql.internal.record/SqlRecordDatabase : app.cash.sqldelight/Transacter { // com.apollographql.cache.normalized.sql.internal.record/SqlRecordDatabase|null[0] + abstract val recordQueries // com.apollographql.cache.normalized.sql.internal.record/SqlRecordDatabase.recordQueries|{}recordQueries[0] + abstract fun (): com.apollographql.cache.normalized.sql.internal.record/RecordQueries // com.apollographql.cache.normalized.sql.internal.record/SqlRecordDatabase.recordQueries.|(){}[0] - final object Companion { // com.apollographql.cache.normalized.sql.internal.fields/FieldsDatabase.Companion|null[0] - final val Schema // com.apollographql.cache.normalized.sql.internal.fields/FieldsDatabase.Companion.Schema|{}Schema[0] - final fun (): app.cash.sqldelight.db/SqlSchema> // com.apollographql.cache.normalized.sql.internal.fields/FieldsDatabase.Companion.Schema.|(){}[0] + final object Companion { // com.apollographql.cache.normalized.sql.internal.record/SqlRecordDatabase.Companion|null[0] + final val Schema // com.apollographql.cache.normalized.sql.internal.record/SqlRecordDatabase.Companion.Schema|{}Schema[0] + final fun (): app.cash.sqldelight.db/SqlSchema> // com.apollographql.cache.normalized.sql.internal.record/SqlRecordDatabase.Companion.Schema.|(){}[0] - final fun invoke(app.cash.sqldelight.db/SqlDriver): com.apollographql.cache.normalized.sql.internal.fields/FieldsDatabase // com.apollographql.cache.normalized.sql.internal.fields/FieldsDatabase.Companion.invoke|invoke(app.cash.sqldelight.db.SqlDriver){}[0] + final fun invoke(app.cash.sqldelight.db/SqlDriver): com.apollographql.cache.normalized.sql.internal.record/SqlRecordDatabase // com.apollographql.cache.normalized.sql.internal.record/SqlRecordDatabase.Companion.invoke|invoke(app.cash.sqldelight.db.SqlDriver){}[0] } } -final class 
com.apollographql.cache.normalized.sql.internal.fields/Field_ { // com.apollographql.cache.normalized.sql.internal.fields/Field_|null[0] - constructor (kotlin/ByteArray, kotlin/String, kotlin/ByteArray?, kotlin/ByteArray?, kotlin/Long?, kotlin/Long?) // com.apollographql.cache.normalized.sql.internal.fields/Field_.|(kotlin.ByteArray;kotlin.String;kotlin.ByteArray?;kotlin.ByteArray?;kotlin.Long?;kotlin.Long?){}[0] - - final val expiration_date // com.apollographql.cache.normalized.sql.internal.fields/Field_.expiration_date|{}expiration_date[0] - final fun (): kotlin/Long? // com.apollographql.cache.normalized.sql.internal.fields/Field_.expiration_date.|(){}[0] - final val field_ // com.apollographql.cache.normalized.sql.internal.fields/Field_.field_|{}field_[0] - final fun (): kotlin/String // com.apollographql.cache.normalized.sql.internal.fields/Field_.field_.|(){}[0] - final val metadata // com.apollographql.cache.normalized.sql.internal.fields/Field_.metadata|{}metadata[0] - final fun (): kotlin/ByteArray? // com.apollographql.cache.normalized.sql.internal.fields/Field_.metadata.|(){}[0] - final val received_date // com.apollographql.cache.normalized.sql.internal.fields/Field_.received_date|{}received_date[0] - final fun (): kotlin/Long? // com.apollographql.cache.normalized.sql.internal.fields/Field_.received_date.|(){}[0] - final val record // com.apollographql.cache.normalized.sql.internal.fields/Field_.record|{}record[0] - final fun (): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.fields/Field_.record.|(){}[0] - final val value_ // com.apollographql.cache.normalized.sql.internal.fields/Field_.value_|{}value_[0] - final fun (): kotlin/ByteArray? // com.apollographql.cache.normalized.sql.internal.fields/Field_.value_.|(){}[0] - - final fun component1(): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.fields/Field_.component1|component1(){}[0] - final fun component2(): kotlin/String // com.apollographql.cache.normalized.sql.internal.fields/Field_.component2|component2(){}[0] - final fun component3(): kotlin/ByteArray? // com.apollographql.cache.normalized.sql.internal.fields/Field_.component3|component3(){}[0] - final fun component4(): kotlin/ByteArray? // com.apollographql.cache.normalized.sql.internal.fields/Field_.component4|component4(){}[0] - final fun component5(): kotlin/Long? // com.apollographql.cache.normalized.sql.internal.fields/Field_.component5|component5(){}[0] - final fun component6(): kotlin/Long? // com.apollographql.cache.normalized.sql.internal.fields/Field_.component6|component6(){}[0] - final fun copy(kotlin/ByteArray = ..., kotlin/String = ..., kotlin/ByteArray? = ..., kotlin/ByteArray? = ..., kotlin/Long? = ..., kotlin/Long? 
= ...): com.apollographql.cache.normalized.sql.internal.fields/Field_ // com.apollographql.cache.normalized.sql.internal.fields/Field_.copy|copy(kotlin.ByteArray;kotlin.String;kotlin.ByteArray?;kotlin.ByteArray?;kotlin.Long?;kotlin.Long?){}[0] - final fun equals(kotlin/Any?): kotlin/Boolean // com.apollographql.cache.normalized.sql.internal.fields/Field_.equals|equals(kotlin.Any?){}[0] - final fun hashCode(): kotlin/Int // com.apollographql.cache.normalized.sql.internal.fields/Field_.hashCode|hashCode(){}[0] - final fun toString(): kotlin/String // com.apollographql.cache.normalized.sql.internal.fields/Field_.toString|toString(){}[0] +final class com.apollographql.cache.normalized.sql.internal.record/Record { // com.apollographql.cache.normalized.sql.internal.record/Record|null[0] + constructor (kotlin/String, kotlin/ByteArray, kotlin/Long) // com.apollographql.cache.normalized.sql.internal.record/Record.|(kotlin.String;kotlin.ByteArray;kotlin.Long){}[0] + + final val key // com.apollographql.cache.normalized.sql.internal.record/Record.key|{}key[0] + final fun (): kotlin/String // com.apollographql.cache.normalized.sql.internal.record/Record.key.|(){}[0] + final val record // com.apollographql.cache.normalized.sql.internal.record/Record.record|{}record[0] + final fun (): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.record/Record.record.|(){}[0] + final val update_date // com.apollographql.cache.normalized.sql.internal.record/Record.update_date|{}update_date[0] + final fun (): kotlin/Long // com.apollographql.cache.normalized.sql.internal.record/Record.update_date.|(){}[0] + + final fun component1(): kotlin/String // com.apollographql.cache.normalized.sql.internal.record/Record.component1|component1(){}[0] + final fun component2(): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.record/Record.component2|component2(){}[0] + final fun component3(): kotlin/Long // com.apollographql.cache.normalized.sql.internal.record/Record.component3|component3(){}[0] + final fun copy(kotlin/String = ..., kotlin/ByteArray = ..., kotlin/Long = ...): com.apollographql.cache.normalized.sql.internal.record/Record // com.apollographql.cache.normalized.sql.internal.record/Record.copy|copy(kotlin.String;kotlin.ByteArray;kotlin.Long){}[0] + final fun equals(kotlin/Any?): kotlin/Boolean // com.apollographql.cache.normalized.sql.internal.record/Record.equals|equals(kotlin.Any?){}[0] + final fun hashCode(): kotlin/Int // com.apollographql.cache.normalized.sql.internal.record/Record.hashCode|hashCode(){}[0] + final fun toString(): kotlin/String // com.apollographql.cache.normalized.sql.internal.record/Record.toString|toString(){}[0] +} + +final class com.apollographql.cache.normalized.sql.internal.record/RecordQueries : app.cash.sqldelight/TransacterImpl { // com.apollographql.cache.normalized.sql.internal.record/RecordQueries|null[0] + constructor (app.cash.sqldelight.db/SqlDriver) // com.apollographql.cache.normalized.sql.internal.record/RecordQueries.|(app.cash.sqldelight.db.SqlDriver){}[0] + + final fun <#A1: kotlin/Any> selectAllRecords(kotlin/Function2): app.cash.sqldelight/Query<#A1> // com.apollographql.cache.normalized.sql.internal.record/RecordQueries.selectAllRecords|selectAllRecords(kotlin.Function2){0§}[0] + final fun <#A1: kotlin/Any> selectRecords(kotlin.collections/Collection, kotlin/Function2): app.cash.sqldelight/Query<#A1> // 
com.apollographql.cache.normalized.sql.internal.record/RecordQueries.selectRecords|selectRecords(kotlin.collections.Collection;kotlin.Function2){0§}[0] + final fun changes(): app.cash.sqldelight/ExecutableQuery // com.apollographql.cache.normalized.sql.internal.record/RecordQueries.changes|changes(){}[0] + final fun count(): app.cash.sqldelight/Query // com.apollographql.cache.normalized.sql.internal.record/RecordQueries.count|count(){}[0] + final fun deleteAllRecords() // com.apollographql.cache.normalized.sql.internal.record/RecordQueries.deleteAllRecords|deleteAllRecords(){}[0] + final fun deleteRecords(kotlin.collections/Collection) // com.apollographql.cache.normalized.sql.internal.record/RecordQueries.deleteRecords|deleteRecords(kotlin.collections.Collection){}[0] + final fun insertOrUpdateRecord(kotlin/String, kotlin/ByteArray, kotlin/Long) // com.apollographql.cache.normalized.sql.internal.record/RecordQueries.insertOrUpdateRecord|insertOrUpdateRecord(kotlin.String;kotlin.ByteArray;kotlin.Long){}[0] + final fun selectAllRecords(): app.cash.sqldelight/Query // com.apollographql.cache.normalized.sql.internal.record/RecordQueries.selectAllRecords|selectAllRecords(){}[0] + final fun selectRecords(kotlin.collections/Collection): app.cash.sqldelight/Query // com.apollographql.cache.normalized.sql.internal.record/RecordQueries.selectRecords|selectRecords(kotlin.collections.Collection){}[0] + final fun trimByUpdateDate(kotlin/Long) // com.apollographql.cache.normalized.sql.internal.record/RecordQueries.trimByUpdateDate|trimByUpdateDate(kotlin.Long){}[0] } -final class com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries : app.cash.sqldelight/TransacterImpl { // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries|null[0] - constructor (app.cash.sqldelight.db/SqlDriver) // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.|(app.cash.sqldelight.db.SqlDriver){}[0] - - final fun <#A1: kotlin/Any> selectAllRecords(kotlin/Function6): app.cash.sqldelight/Query<#A1> // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.selectAllRecords|selectAllRecords(kotlin.Function6){0§}[0] - final fun <#A1: kotlin/Any> selectRecords(kotlin.collections/Collection, kotlin/Function6): app.cash.sqldelight/Query<#A1> // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.selectRecords|selectRecords(kotlin.collections.Collection;kotlin.Function6){0§}[0] - final fun changes(): app.cash.sqldelight/ExecutableQuery // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.changes|changes(){}[0] - final fun count(): app.cash.sqldelight/Query // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.count|count(){}[0] - final fun deleteAllRecords() // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.deleteAllRecords|deleteAllRecords(){}[0] - final fun deleteRecords(kotlin.collections/Collection) // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.deleteRecords|deleteRecords(kotlin.collections.Collection){}[0] - final fun insertOrUpdateField(kotlin/ByteArray, kotlin/String, kotlin/ByteArray?, kotlin/ByteArray?, kotlin/Long?, kotlin/Long?) 
// com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.insertOrUpdateField|insertOrUpdateField(kotlin.ByteArray;kotlin.String;kotlin.ByteArray?;kotlin.ByteArray?;kotlin.Long?;kotlin.Long?){}[0] - final fun selectAllRecords(): app.cash.sqldelight/Query // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.selectAllRecords|selectAllRecords(){}[0] - final fun selectRecords(kotlin.collections/Collection): app.cash.sqldelight/Query // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.selectRecords|selectRecords(kotlin.collections.Collection){}[0] - final fun trimByReceivedDate(kotlin/Long) // com.apollographql.cache.normalized.sql.internal.fields/FieldsQueries.trimByReceivedDate|trimByReceivedDate(kotlin.Long){}[0] +final class com.apollographql.cache.normalized.sql.internal.record/SelectAllRecords { // com.apollographql.cache.normalized.sql.internal.record/SelectAllRecords|null[0] + constructor (kotlin/String, kotlin/ByteArray) // com.apollographql.cache.normalized.sql.internal.record/SelectAllRecords.|(kotlin.String;kotlin.ByteArray){}[0] + + final val key // com.apollographql.cache.normalized.sql.internal.record/SelectAllRecords.key|{}key[0] + final fun (): kotlin/String // com.apollographql.cache.normalized.sql.internal.record/SelectAllRecords.key.|(){}[0] + final val record // com.apollographql.cache.normalized.sql.internal.record/SelectAllRecords.record|{}record[0] + final fun (): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.record/SelectAllRecords.record.|(){}[0] + + final fun component1(): kotlin/String // com.apollographql.cache.normalized.sql.internal.record/SelectAllRecords.component1|component1(){}[0] + final fun component2(): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.record/SelectAllRecords.component2|component2(){}[0] + final fun copy(kotlin/String = ..., kotlin/ByteArray = ...): com.apollographql.cache.normalized.sql.internal.record/SelectAllRecords // com.apollographql.cache.normalized.sql.internal.record/SelectAllRecords.copy|copy(kotlin.String;kotlin.ByteArray){}[0] + final fun equals(kotlin/Any?): kotlin/Boolean // com.apollographql.cache.normalized.sql.internal.record/SelectAllRecords.equals|equals(kotlin.Any?){}[0] + final fun hashCode(): kotlin/Int // com.apollographql.cache.normalized.sql.internal.record/SelectAllRecords.hashCode|hashCode(){}[0] + final fun toString(): kotlin/String // com.apollographql.cache.normalized.sql.internal.record/SelectAllRecords.toString|toString(){}[0] +} + +final class com.apollographql.cache.normalized.sql.internal.record/SelectRecords { // com.apollographql.cache.normalized.sql.internal.record/SelectRecords|null[0] + constructor (kotlin/String, kotlin/ByteArray) // com.apollographql.cache.normalized.sql.internal.record/SelectRecords.|(kotlin.String;kotlin.ByteArray){}[0] + + final val key // com.apollographql.cache.normalized.sql.internal.record/SelectRecords.key|{}key[0] + final fun (): kotlin/String // com.apollographql.cache.normalized.sql.internal.record/SelectRecords.key.|(){}[0] + final val record // com.apollographql.cache.normalized.sql.internal.record/SelectRecords.record|{}record[0] + final fun (): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.record/SelectRecords.record.|(){}[0] + + final fun component1(): kotlin/String // com.apollographql.cache.normalized.sql.internal.record/SelectRecords.component1|component1(){}[0] + final fun component2(): kotlin/ByteArray // 
com.apollographql.cache.normalized.sql.internal.record/SelectRecords.component2|component2(){}[0] + final fun copy(kotlin/String = ..., kotlin/ByteArray = ...): com.apollographql.cache.normalized.sql.internal.record/SelectRecords // com.apollographql.cache.normalized.sql.internal.record/SelectRecords.copy|copy(kotlin.String;kotlin.ByteArray){}[0] + final fun equals(kotlin/Any?): kotlin/Boolean // com.apollographql.cache.normalized.sql.internal.record/SelectRecords.equals|equals(kotlin.Any?){}[0] + final fun hashCode(): kotlin/Int // com.apollographql.cache.normalized.sql.internal.record/SelectRecords.hashCode|hashCode(){}[0] + final fun toString(): kotlin/String // com.apollographql.cache.normalized.sql.internal.record/SelectRecords.toString|toString(){}[0] } final class com.apollographql.cache.normalized.sql/SqlNormalizedCache : com.apollographql.cache.normalized.api/NormalizedCache { // com.apollographql.cache.normalized.sql/SqlNormalizedCache|null[0] diff --git a/normalized-cache-sqlite-incubating/build.gradle.kts b/normalized-cache-sqlite-incubating/build.gradle.kts index 3dfc94f1..5e910e35 100644 --- a/normalized-cache-sqlite-incubating/build.gradle.kts +++ b/normalized-cache-sqlite-incubating/build.gradle.kts @@ -30,10 +30,10 @@ android { } sqldelight { - databases.create("FieldsDatabase") { - packageName.set("com.apollographql.cache.normalized.sql.internal.fields") - schemaOutputDirectory.set(file("sqldelight/fields/schema")) - srcDirs.setFrom("src/commonMain/sqldelight/fields/") + databases.create("SqlRecordDatabase") { + packageName.set("com.apollographql.cache.normalized.sql.internal.record") + schemaOutputDirectory.set(file("sqldelight/record/schema")) + srcDirs.setFrom("src/commonMain/sqldelight/record/") } } diff --git a/normalized-cache-sqlite-incubating/sqldelight/fields/schema/1.db b/normalized-cache-sqlite-incubating/sqldelight/record/schema/1.db similarity index 96% rename from normalized-cache-sqlite-incubating/sqldelight/fields/schema/1.db rename to normalized-cache-sqlite-incubating/sqldelight/record/schema/1.db index d7585ec68ff45e47e5bb2dfa19355fec16eff4c0..8d1e4af6e21471cdc6db9d26d44f52aa1127d2eb 100644 GIT binary patch
- visited: MutableSet = mutableSetOf(), - ): Set { + keys: Collection, + visited: MutableSet = mutableSetOf(), + ): Set { if (keys.isEmpty()) return emptySet() val referencedKeys = - recordDatabase.selectRecords((keys - visited).map { it.toByteArray() }).flatMap { it.referencedFields() }.map { it.key }.toSet() + recordDatabase.selectRecords(keys - visited).flatMap { it.referencedFields() }.map { it.key }.toSet() visited += keys return referencedKeys + getReferencedKeysRecursively(referencedKeys, visited) } @@ -84,56 +82,40 @@ class SqlNormalizedCache internal constructor( /** * Assumes an enclosing transaction */ - private fun internalDeleteRecords(keys: Collection, cascade: Boolean): Int { + private fun internalDeleteRecords(keys: Collection, cascade: Boolean): Int { val referencedKeys = if (cascade) { getReferencedKeysRecursively(keys) } else { emptySet() } return (keys
+ referencedKeys).chunked(999).sumOf { chunkedKeys -> - recordDatabase.deleteRecords(chunkedKeys.map { it.toByteArray() }) + recordDatabase.deleteRecords(chunkedKeys) recordDatabase.changes().toInt() } } /** * Updates records. - * - * As an optimization, the [records] fields are directly upserted into the db when possible. This is possible when using - * the [DefaultRecordMerger], and [ApolloCacheHeaders.ERRORS_REPLACE_CACHED_VALUES] is set to true. - * Otherwise, the [records] must be merged programmatically using the given [recordMerger], requiring to load the existing records from - * the db first. + * The [records] are merged using the given [recordMerger], requiring to load the existing records from the db first. */ private fun internalUpdateRecords(records: Collection, cacheHeaders: CacheHeaders, recordMerger: RecordMerger): Set { val receivedDate = cacheHeaders.headerValue(ApolloCacheHeaders.RECEIVED_DATE) val expirationDate = cacheHeaders.headerValue(ApolloCacheHeaders.EXPIRATION_DATE) - val errorsReplaceCachedValues = cacheHeaders.headerValue(ApolloCacheHeaders.ERRORS_REPLACE_CACHED_VALUES) == "true" - return if (recordMerger is DefaultRecordMerger && errorsReplaceCachedValues) { - recordDatabase.transaction { - for (record in records) { + return recordDatabase.transaction { + val existingRecords = selectRecords(records.map { it.key }).associateBy { it.key } + records.flatMap { record -> + val existingRecord = existingRecords[record.key] + if (existingRecord == null) { recordDatabase.insertOrUpdateRecord(record.withDates(receivedDate = receivedDate, expirationDate = expirationDate)) + record.fieldKeys() + } else { + val (mergedRecord, changedKeys) = recordMerger.merge(RecordMergerContext(existing = existingRecord, incoming = record.withDates(receivedDate = receivedDate, expirationDate = expirationDate), cacheHeaders = cacheHeaders)) + if (mergedRecord.isNotEmpty()) { + recordDatabase.insertOrUpdateRecord(mergedRecord) + } + changedKeys } - } - records.flatMap { record -> - record.fieldKeys() }.toSet() - } else { - recordDatabase.transaction { - val existingRecords = selectRecords(records.map { it.key }).associateBy { it.key } - records.flatMap { record -> - val existingRecord = existingRecords[record.key] - if (existingRecord == null) { - recordDatabase.insertOrUpdateRecord(record.withDates(receivedDate = receivedDate, expirationDate = expirationDate)) - record.fieldKeys() - } else { - val (mergedRecord, changedKeys) = recordMerger.merge(RecordMergerContext(existing = existingRecord, incoming = record.withDates(receivedDate = receivedDate, expirationDate = expirationDate), cacheHeaders = cacheHeaders)) - if (mergedRecord.isNotEmpty()) { - recordDatabase.insertOrUpdateRecord(mergedRecord) - } - changedKeys - } - }.toSet() - } } } @@ -143,7 +125,7 @@ class SqlNormalizedCache internal constructor( */ private fun selectRecords(keys: Collection): List { return keys - .map { it.key.toByteArray() } + .map { it.key } .chunked(999).flatMap { chunkedKeys -> recordDatabase.selectRecords(chunkedKeys) } @@ -153,7 +135,7 @@ class SqlNormalizedCache internal constructor( val size = recordDatabase.databaseSize() return if (size >= maxSizeBytes) { val count = recordDatabase.count().executeAsOne() - recordDatabase.trimByReceivedDate((count * trimFactor).toLong()) + recordDatabase.trimByUpdateDate((count * trimFactor).toLong()) recordDatabase.vacuum() recordDatabase.databaseSize() } else { diff --git 
a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/ApolloJsonElementSerializer.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/ApolloJsonElementSerializer.kt deleted file mode 100644 index dec8d241..00000000 --- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/ApolloJsonElementSerializer.kt +++ /dev/null @@ -1,244 +0,0 @@ -package com.apollographql.cache.normalized.sql.internal - -import com.apollographql.apollo.api.Error -import com.apollographql.apollo.api.Error.Builder -import com.apollographql.apollo.api.json.ApolloJsonElement -import com.apollographql.apollo.api.json.JsonNumber -import com.apollographql.cache.normalized.api.CacheKey -import okio.Buffer -import okio.utf8Size - -/** - * A serializer that serializes/deserializes [ApolloJsonElement]s to/from [ByteArray]s. - */ -internal object ApolloJsonElementSerializer { - fun serialize(jsonElement: ApolloJsonElement): ByteArray { - val buffer = Buffer() - buffer.writeAny(jsonElement) - return buffer.readByteArray() - } - - fun deserialize(bytes: ByteArray?): ApolloJsonElement { - if (bytes == null) return null - val buffer = Buffer().write(bytes) - return buffer.readAny() - } - - private fun Buffer.writeString(value: String) { - writeNumber(value.utf8Size()) - writeUtf8(value) - } - - private fun Buffer.readString(): String { - return readUtf8(readNumber().toLong()) - } - - private fun Buffer.writeNumber(value: Number) { - when (value.toLong()) { - 0L -> { - writeByte(NUMBER_0) - } - - in Byte.MIN_VALUE..Byte.MAX_VALUE -> { - writeByte(NUMBER_BYTE) - writeByte(value.toInt()) - } - - in Short.MIN_VALUE..Short.MAX_VALUE -> { - writeByte(NUMBER_SHORT) - writeShort(value.toInt()) - } - - in Int.MIN_VALUE..Int.MAX_VALUE -> { - writeByte(NUMBER_INT) - writeInt(value.toInt()) - } - - else -> { - writeByte(NUMBER_LONG) - writeLong(value.toLong()) - } - } - } - - private fun Buffer.readNumber(): Number { - return when (val what = readByte().toInt()) { - NUMBER_0 -> 0 - NUMBER_BYTE -> readByte() - NUMBER_SHORT -> readShort() - NUMBER_INT -> readInt() - NUMBER_LONG -> readLong() - else -> error("Trying to read unsupported Number type: $what") - } - } - - private fun Buffer.writeAny(value: ApolloJsonElement) { - when (value) { - is String -> { - if (value.isEmpty()) { - writeByte(EMPTY_STRING) - } else { - writeByte(STRING) - writeString(value) - } - } - - is Int, is Long -> { - writeNumber(value) - } - - is Double -> { - buffer.writeByte(DOUBLE) - buffer.writeLong(value.toBits()) - } - - is JsonNumber -> { - buffer.writeByte(JSON_NUMBER) - buffer.writeString(value.value) - } - - is Boolean -> { - if (value) { - buffer.writeByte(BOOLEAN_TRUE) - } else { - buffer.writeByte(BOOLEAN_FALSE) - } - } - - is CacheKey -> { - buffer.writeByte(CACHE_KEY) - buffer.write(value.key) - } - - is List<*> -> { - if (value.isEmpty()) { - buffer.writeByte(EMPTY_LIST) - } else { - buffer.writeByte(LIST) - buffer.writeNumber(value.size) - value.forEach { - buffer.writeAny(it) - } - } - } - - is Map<*, *> -> { - if (value.isEmpty()) { - buffer.writeByte(MAP_EMPTY) - } else { - buffer.writeByte(MAP) - buffer.writeNumber(value.size) - @Suppress("UNCHECKED_CAST") - value as Map - value.forEach { - buffer.writeString(it.key) - buffer.writeAny(it.value) - } - } - } - - null -> { - buffer.writeByte(NULL) - } - - is Error -> { - buffer.writeByte(ERROR) - buffer.writeString(value.message) - 
buffer.writeNumber(value.locations?.size ?: 0) - for (location in value.locations.orEmpty()) { - buffer.writeNumber(location.line) - buffer.writeNumber(location.column) - } - buffer.writeNumber(value.path?.size ?: 0) - for (path in value.path.orEmpty()) { - buffer.writeAny(path) - } - buffer.writeAny(value.extensions) - } - - else -> error("Trying to write unsupported Record value: $value") - } - } - - private fun Buffer.readAny(): ApolloJsonElement { - return when (val what = readByte().toInt()) { - STRING -> readString() - EMPTY_STRING -> "" - NUMBER_0 -> 0 - NUMBER_BYTE -> readByte().toInt() - NUMBER_SHORT -> readShort().toInt() - NUMBER_INT -> readInt() - NUMBER_LONG -> readLong() - DOUBLE -> Double.fromBits(readLong()) - JSON_NUMBER -> JsonNumber(readString()) - BOOLEAN_TRUE -> true - BOOLEAN_FALSE -> false - CACHE_KEY -> { - CacheKey(readByteString(CacheKey.HASH_SIZE_BYTES.toLong())) - } - - LIST -> { - val size = readNumber().toInt() - 0.until(size).map { - readAny() - } - } - EMPTY_LIST -> emptyList() - - MAP -> { - val size = readNumber().toInt() - 0.until(size).associate { - readString() to readAny() - } - } - MAP_EMPTY -> emptyMap() - - NULL -> null - - ERROR -> { - val message = readString() - val locations = 0.until(readNumber().toInt()).map { - Error.Location(readNumber().toInt(), readNumber().toInt()) - } - val path = 0.until(readNumber().toInt()).map { - readAny()!! - } - - @Suppress("UNCHECKED_CAST") - val extensions = readAny() as Map? - Builder(message = message) - .path(path) - .apply { - for ((key, value) in extensions.orEmpty()) { - putExtension(key, value) - } - if (locations.isNotEmpty()) { - locations(locations) - } - } - .build() - } - - else -> error("Trying to read unsupported Record type: $what") - } - } - - private const val NULL = 0 - private const val STRING = 1 - private const val EMPTY_STRING = 2 - private const val NUMBER_0 = 3 - private const val NUMBER_BYTE = 4 - private const val NUMBER_SHORT = 5 - private const val NUMBER_INT = 6 - private const val NUMBER_LONG = 7 - private const val BOOLEAN_TRUE = 8 - private const val BOOLEAN_FALSE = 9 - private const val DOUBLE = 10 - private const val JSON_NUMBER = 11 - private const val LIST = 12 - private const val EMPTY_LIST = 13 - private const val MAP = 14 - private const val MAP_EMPTY = 15 - private const val CACHE_KEY = 16 - private const val ERROR = 17 -} diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordDatabase.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordDatabase.kt index db40bf76..849806d0 100644 --- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordDatabase.kt +++ b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordDatabase.kt @@ -3,23 +3,16 @@ package com.apollographql.cache.normalized.sql.internal import app.cash.sqldelight.Query import app.cash.sqldelight.db.QueryResult import app.cash.sqldelight.db.SqlDriver -import com.apollographql.apollo.api.json.ApolloJsonElement -import com.apollographql.cache.normalized.api.ApolloCacheHeaders -import com.apollographql.cache.normalized.api.CacheKey +import com.apollographql.apollo.mpp.currentTimeMillis import com.apollographql.cache.normalized.api.Record -import com.apollographql.cache.normalized.api.expirationDate -import com.apollographql.cache.normalized.api.receivedDate -import 
com.apollographql.cache.normalized.sql.internal.fields.Field_ -import com.apollographql.cache.normalized.sql.internal.fields.FieldsDatabase -import com.apollographql.cache.normalized.sql.internal.fields.FieldsQueries -import okio.ByteString -import okio.ByteString.Companion.toByteString +import com.apollographql.cache.normalized.sql.internal.record.RecordQueries +import com.apollographql.cache.normalized.sql.internal.record.SqlRecordDatabase internal class RecordDatabase(private val driver: SqlDriver) { - private val fieldsQueries: FieldsQueries = FieldsDatabase(driver).fieldsQueries + private val recordQueries: RecordQueries = SqlRecordDatabase(driver).recordQueries fun transaction(body: () -> T): T { - return fieldsQueries.transactionWithResult { + return recordQueries.transactionWithResult { body() } } @@ -27,95 +20,28 @@ internal class RecordDatabase(private val driver: SqlDriver) { /** * @param keys the keys of the records to select, size must be <= 999 */ - fun selectRecords(keys: Collection): List { - val fieldsByRecordKey: Map> = - fieldsQueries.selectRecords(keys).executeAsList().groupBy { it.record.toByteString() } - return fieldsByRecordKey.toRecords() + fun selectRecords(keys: Collection): List { + return recordQueries.selectRecords(keys).executeAsList().map { RecordSerializer.deserialize(it.key, it.record) } } fun selectAllRecords(): List { - val fieldsByRecordKey: Map> = - fieldsQueries.selectAllRecords().executeAsList().groupBy { it.record.toByteString() } - return fieldsByRecordKey.toRecords() + return recordQueries.selectAllRecords().executeAsList().map { RecordSerializer.deserialize(it.key, it.record) } } - private fun Map>.toRecords(): List = - mapValues { (key, fieldList) -> - val fieldValues: Map = fieldList.associate { field -> - field.field_ to ApolloJsonElementSerializer.deserialize(field.value_) - } - - @Suppress("UNCHECKED_CAST") - val metadata: Map> = fieldList.associate { field -> - val deserializedMetadata = ApolloJsonElementSerializer.deserialize(field.metadata) as Map? 
- field.field_ to LinkedHashMap((deserializedMetadata?.size ?: 0) + 2).also { - if (deserializedMetadata != null) { - it.putAll(deserializedMetadata) - } - - // Dates are stored separately in their own columns - if (field.received_date != null) { - it.put(ApolloCacheHeaders.RECEIVED_DATE, field.received_date) - } - if (field.expiration_date != null) { - it.put(ApolloCacheHeaders.EXPIRATION_DATE, field.expiration_date) - } - } - }.filterValues { it.isNotEmpty() } - Record( - key = CacheKey(key), - fields = fieldValues, - metadata = metadata, - ) - }.values.toList() - fun insertOrUpdateRecord(record: Record) { - for ((field, value) in record.fields) { - insertOrUpdateField( - record = record.key.key.toByteArray(), - field = field, - value = value, - metadata = record.metadata[field], - receivedDate = record.receivedDate(field), - expirationDate = record.expirationDate(field), - ) - } + recordQueries.insertOrUpdateRecord(key = record.key.key, record = RecordSerializer.serialize(record), update_date = currentTimeMillis()) } - private fun insertOrUpdateField( - record: ByteArray, - field: String, - value: ApolloJsonElement, - metadata: Map?, - receivedDate: Long?, - expirationDate: Long?, - ) { - fieldsQueries.insertOrUpdateField( - record = record, - field_ = field, - value_ = ApolloJsonElementSerializer.serialize(value), - metadata = metadata - ?.takeIf { it.isNotEmpty() } - ?.let { - ApolloJsonElementSerializer.serialize( - // Don't store the dates in the metadata as they are stored separately in their own columns - it - ApolloCacheHeaders.RECEIVED_DATE - ApolloCacheHeaders.EXPIRATION_DATE - ) - }, - received_date = receivedDate, - expiration_date = expirationDate, - ) - } /** * @param keys the keys of the records to delete, size must be <= 999 */ - fun deleteRecords(keys: Collection) { - fieldsQueries.deleteRecords(keys) + fun deleteRecords(keys: Collection) { + recordQueries.deleteRecords(keys) } fun deleteAllRecords() { - fieldsQueries.deleteAllRecords() + recordQueries.deleteAllRecords() } fun databaseSize(): Long { @@ -126,11 +52,11 @@ internal class RecordDatabase(private val driver: SqlDriver) { } fun count(): Query { - return fieldsQueries.count() + return recordQueries.count() } - fun trimByReceivedDate(limit: Long) { - fieldsQueries.trimByReceivedDate(limit) + fun trimByUpdateDate(limit: Long) { + recordQueries.trimByUpdateDate(limit) } fun vacuum() { @@ -138,6 +64,6 @@ internal class RecordDatabase(private val driver: SqlDriver) { } fun changes(): Long { - return fieldsQueries.changes().executeAsOne() + return recordQueries.changes().executeAsOne() } } diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordSerializer.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordSerializer.kt new file mode 100644 index 00000000..15842d3a --- /dev/null +++ b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordSerializer.kt @@ -0,0 +1,321 @@ +package com.apollographql.cache.normalized.sql.internal + +import com.apollographql.apollo.api.Error +import com.apollographql.apollo.api.Error.Builder +import com.apollographql.apollo.api.json.ApolloJsonElement +import com.apollographql.apollo.api.json.JsonNumber +import com.apollographql.cache.normalized.api.CacheKey +import com.apollographql.cache.normalized.api.Record +import com.apollographql.cache.normalized.api.RecordValue +import okio.Buffer +import 
okio.utf8Size + +/** + * A serializer that serializes/deserializes [RecordValue]s to/from [ByteArray]s. + */ +internal object RecordSerializer { + fun serialize(record: Record): ByteArray { + val buffer = Buffer() + buffer.writeMap(record.fields) + buffer._writeInt(record.metadata.size) + for ((k, v) in record.metadata) { + buffer.writeString(k) + buffer.writeMap(v) + } + return buffer.readByteArray() + } + + fun deserialize(key: String, bytes: ByteArray): Record { + val buffer = Buffer().write(bytes) + val fields = buffer.readMap() + val metadataSize = buffer._readInt() + val metadata = HashMap>(metadataSize).apply { + repeat(metadataSize) { + val k = buffer.readString() + val v = buffer.readMap() + put(k, v) + } + } + return Record( + key = CacheKey(key), + fields = fields, + mutationId = null, + metadata = metadata + ) + } + + private fun Buffer.writeString(value: String) { + _writeInt(value.utf8Size().toInt()) + writeUtf8(value) + } + + private fun Buffer.readString(): String { + return readUtf8(_readInt().toLong()) + } + + private fun Buffer._writeInt(value: Int) { + when (value) { + 0 -> { + writeByte(INT_0) + } + + in Byte.MIN_VALUE..Byte.MAX_VALUE -> { + writeByte(INT_BYTE) + writeByte(value.toInt()) + } + + in Short.MIN_VALUE..Short.MAX_VALUE -> { + writeByte(INT_SHORT) + writeShort(value.toInt()) + } + + else -> { + writeByte(INT_INT) + writeInt(value.toInt()) + } + } + } + + private fun Buffer._readInt(): Int { + return when (val what = readByte().toInt()) { + INT_0 -> 0 + INT_BYTE -> readByte().toInt() + INT_SHORT -> readShort().toInt() + INT_INT -> readInt() + else -> error("Trying to read unsupported Int type: $what") + } + } + + private fun Buffer._writeLong(value: Long) { + when (value) { + 0L -> { + writeByte(LONG_0) + } + + in Byte.MIN_VALUE..Byte.MAX_VALUE -> { + writeByte(LONG_BYTE) + writeByte(value.toInt()) + } + + in Short.MIN_VALUE..Short.MAX_VALUE -> { + writeByte(LONG_SHORT) + writeShort(value.toInt()) + } + + in Int.MIN_VALUE..Int.MAX_VALUE -> { + writeByte(LONG_INT) + writeInt(value.toInt()) + } + + else -> { + writeByte(LONG_LONG) + writeLong(value.toLong()) + } + } + } + + private fun Buffer._readLong(): Long { + return when (val what = readByte().toInt()) { + LONG_0 -> 0L + LONG_BYTE -> readByte().toLong() + LONG_SHORT -> readShort().toLong() + LONG_INT -> readInt().toLong() + LONG_LONG -> readLong() + else -> error("Trying to read unsupported Long type: $what") + } + } + + private fun Buffer.writeMap(value: Map<*, *>) { + _writeInt(value.size) + @Suppress("UNCHECKED_CAST") + value as Map + for ((k, v) in value) { + writeString(k) + writeAny(v) + } + } + + private fun Buffer.readMap(): Map { + val size = _readInt() + return HashMap(size).apply { + repeat(size) { + put(readString(), readAny()) + } + } + } + + private fun Buffer.writeAny(value: RecordValue) { + when (value) { + is String -> { + if (value.isEmpty()) { + writeByte(EMPTY_STRING) + } else { + writeByte(STRING) + writeString(value) + } + } + + is Int -> { + _writeInt(value) + } + + is Long -> { + _writeLong(value) + } + + is Double -> { + writeByte(DOUBLE) + writeLong(value.toBits()) + } + + is JsonNumber -> { + writeByte(JSON_NUMBER) + writeString(value.value) + } + + is Boolean -> { + if (value) { + writeByte(BOOLEAN_TRUE) + } else { + writeByte(BOOLEAN_FALSE) + } + } + + is CacheKey -> { + writeByte(CACHE_KEY) + writeString(value.key) + } + + is List<*> -> { + if (value.isEmpty()) { + writeByte(EMPTY_LIST) + } else { + writeByte(LIST) + _writeInt(value.size) + value.forEach { + writeAny(it) + } + 
} + } + + is Map<*, *> -> { + if (value.isEmpty()) { + writeByte(MAP_EMPTY) + } else { + writeByte(MAP) + writeMap(value) + } + } + + null -> { + writeByte(NULL) + } + + is Error -> { + writeByte(ERROR) + writeString(value.message) + _writeInt(value.locations?.size ?: 0) + for (location in value.locations.orEmpty()) { + _writeInt(location.line) + _writeInt(location.column) + } + _writeInt(value.path?.size ?: 0) + for (path in value.path.orEmpty()) { + writeAny(path) + } + writeAny(value.extensions) + } + + else -> error("Trying to write unsupported Record value: $value") + } + } + + private fun Buffer.readAny(): RecordValue { + return when (val what = readByte().toInt()) { + STRING -> readString() + EMPTY_STRING -> "" + INT_0 -> 0 + INT_BYTE -> readByte().toInt() + INT_SHORT -> readShort().toInt() + INT_INT -> readInt() + LONG_0 -> 0L + LONG_BYTE -> readByte().toLong() + LONG_SHORT -> readShort().toLong() + LONG_INT -> readInt().toLong() + LONG_LONG -> readLong() + DOUBLE -> Double.fromBits(readLong()) + JSON_NUMBER -> JsonNumber(readString()) + BOOLEAN_TRUE -> true + BOOLEAN_FALSE -> false + CACHE_KEY -> { + CacheKey(readString()) + } + + LIST -> { + val size = _readInt() + 0.until(size).map { + readAny() + } + } + + EMPTY_LIST -> emptyList() + + MAP -> { + readMap() + } + + MAP_EMPTY -> emptyMap() + + NULL -> null + + ERROR -> { + val message = readString() + val locations = 0.until(_readInt()).map { + Error.Location(_readInt(), _readInt()) + } + val path = 0.until(_readInt()).map { + readAny()!! + } + + @Suppress("UNCHECKED_CAST") + val extensions = readAny() as Map? + Builder(message = message) + .path(path) + .apply { + for ((key, value) in extensions.orEmpty()) { + putExtension(key, value) + } + if (locations.isNotEmpty()) { + locations(locations) + } + } + .build() + } + + else -> error("Trying to read unsupported Record type: $what") + } + } + + private const val NULL = 0 + private const val STRING = 1 + private const val EMPTY_STRING = 2 + private const val INT_0 = 3 + private const val INT_BYTE = 4 + private const val INT_SHORT = 5 + private const val INT_INT = 6 + private const val LONG_0 = 7 + private const val LONG_BYTE = 8 + private const val LONG_SHORT = 9 + private const val LONG_INT = 10 + private const val LONG_LONG = 11 + private const val BOOLEAN_TRUE = 12 + private const val BOOLEAN_FALSE = 13 + private const val DOUBLE = 14 + private const val JSON_NUMBER = 15 + private const val LIST = 16 + private const val EMPTY_LIST = 17 + private const val MAP = 18 + private const val MAP_EMPTY = 19 + private const val CACHE_KEY = 20 + private const val ERROR = 21 +} diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/factoryHelpers.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/factoryHelpers.kt index 68346b05..0d7660fc 100644 --- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/factoryHelpers.kt +++ b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/factoryHelpers.kt @@ -4,7 +4,7 @@ import app.cash.sqldelight.db.QueryResult import app.cash.sqldelight.db.SqlDriver import app.cash.sqldelight.db.SqlSchema import com.apollographql.apollo.exception.apolloExceptionHandler -import com.apollographql.cache.normalized.sql.internal.fields.FieldsDatabase +import com.apollographql.cache.normalized.sql.internal.record.SqlRecordDatabase internal fun 
createRecordDatabase(driver: SqlDriver): RecordDatabase { maybeCreateOrMigrateSchema(driver, getSchema()) @@ -30,7 +30,7 @@ internal fun createRecordDatabase(driver: SqlDriver): RecordDatabase { */ } - val expectedTableName = "field" + val expectedTableName = "record" check(tableNames.isEmpty() || tableNames.contains(expectedTableName)) { "Apollo: Cannot find the '$expectedTableName' table (found '$tableNames' instead)" } @@ -38,4 +38,4 @@ internal fun createRecordDatabase(driver: SqlDriver): RecordDatabase { return RecordDatabase(driver) } -internal fun getSchema(): SqlSchema> = FieldsDatabase.Schema +internal fun getSchema(): SqlSchema> = SqlRecordDatabase.Schema diff --git a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/apollographql/cache/normalized/sql/internal/fields/fields.sq b/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/apollographql/cache/normalized/sql/internal/fields/fields.sq deleted file mode 100644 index d65e3962..00000000 --- a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/apollographql/cache/normalized/sql/internal/fields/fields.sq +++ /dev/null @@ -1,41 +0,0 @@ -CREATE TABLE field ( - record BLOB NOT NULL, - field TEXT NOT NULL, - value BLOB, - metadata BLOB, - received_date INTEGER, - expiration_date INTEGER, - PRIMARY KEY (record, field) ON CONFLICT REPLACE -) -WITHOUT ROWID; - -selectRecords: -SELECT record, field, value, metadata, received_date, expiration_date FROM field WHERE record IN ?; - -selectAllRecords: -SELECT record, field, value, metadata, received_date, expiration_date FROM field; - -insertOrUpdateField: -INSERT INTO field (record, field, value, metadata, received_date, expiration_date) VALUES (?, ?, ?, ?, ?, ?); - -deleteRecords: -DELETE FROM field WHERE record IN ?; - -deleteAllRecords: -DELETE FROM field; - -count: -SELECT count(*) FROM field; - -trimByReceivedDate: -DELETE FROM field -WHERE (record, field) IN ( - SELECT record, field - FROM field - WHERE received_date IS NOT NULL - ORDER BY received_date ASC - LIMIT ? 
-); - -changes: -SELECT changes(); diff --git a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/migrations/1.sqm b/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/migrations/1.sqm deleted file mode 100644 index efe1223b..00000000 --- a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/fields/com/migrations/1.sqm +++ /dev/null @@ -1,13 +0,0 @@ --- Version 1 is either the fields schema (do nothing) or the legacy json schema (drop and create) -DROP TABLE IF EXISTS records; - -CREATE TABLE IF NOT EXISTS field ( - record TEXT NOT NULL, - field TEXT NOT NULL, - value BLOB, - metadata BLOB, - received_date INTEGER, - expiration_date INTEGER, - PRIMARY KEY (record, field) ON CONFLICT REPLACE -) -WITHOUT ROWID; diff --git a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/record/com/apollographql/cache/normalized/sql/internal/record/record.sq b/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/record/com/apollographql/cache/normalized/sql/internal/record/record.sq new file mode 100644 index 00000000..141dc343 --- /dev/null +++ b/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/record/com/apollographql/cache/normalized/sql/internal/record/record.sq @@ -0,0 +1,31 @@ +CREATE TABLE record ( + key TEXT NOT NULL, + record BLOB NOT NULL, + update_date INTEGER NOT NULL, + PRIMARY KEY (key) ON CONFLICT REPLACE +) +WITHOUT ROWID; + +selectRecords: +SELECT key, record FROM record WHERE key IN ?; + +selectAllRecords: +SELECT key, record FROM record; + +insertOrUpdateRecord: +INSERT INTO record (key, record, update_date) VALUES (?, ?, ?); + +deleteRecords: +DELETE FROM record WHERE key IN ?; + +deleteAllRecords: +DELETE FROM record; + +count: +SELECT count(*) FROM record; + +trimByUpdateDate: +DELETE FROM record WHERE key IN (SELECT key FROM record ORDER BY update_date LIMIT ?); + +changes: +SELECT changes(); diff --git a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/record/com/migrations/1.sqm b/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/record/com/migrations/1.sqm new file mode 100644 index 00000000..d3a6425a --- /dev/null +++ b/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/record/com/migrations/1.sqm @@ -0,0 +1,10 @@ +-- Version 1 is either the record schema (do nothing) or the legacy json schema (drop and create) +DROP TABLE IF EXISTS records; + +CREATE TABLE IF NOT EXISTS record ( + key TEXT NOT NULL, + record BLOB NOT NULL, + update_date INTEGER NOT NULL, + PRIMARY KEY (key) ON CONFLICT REPLACE +) +WITHOUT ROWID; From 1bc223862208817580938b4baf0dc3126f83db30 Mon Sep 17 00:00:00 2001 From: BoD Date: Tue, 25 Mar 2025 10:28:19 +0100 Subject: [PATCH 12/29] Renames for consistency --- .../normalized/sql/internal/RecordSerializer.kt | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordSerializer.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordSerializer.kt index 15842d3a..47c7e731 100644 --- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordSerializer.kt +++ b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordSerializer.kt @@ -148,7 +148,7 @@ internal object RecordSerializer { when (value) { is String -> { if (value.isEmpty()) { - 
writeByte(EMPTY_STRING) + writeByte(STRING_EMPTY) } else { writeByte(STRING) writeString(value) @@ -188,7 +188,7 @@ internal object RecordSerializer { is List<*> -> { if (value.isEmpty()) { - writeByte(EMPTY_LIST) + writeByte(LIST_EMPTY) } else { writeByte(LIST) _writeInt(value.size) @@ -233,7 +233,7 @@ internal object RecordSerializer { private fun Buffer.readAny(): RecordValue { return when (val what = readByte().toInt()) { STRING -> readString() - EMPTY_STRING -> "" + STRING_EMPTY -> "" INT_0 -> 0 INT_BYTE -> readByte().toInt() INT_SHORT -> readShort().toInt() @@ -258,7 +258,7 @@ internal object RecordSerializer { } } - EMPTY_LIST -> emptyList() + LIST_EMPTY -> emptyList() MAP -> { readMap() @@ -298,7 +298,7 @@ internal object RecordSerializer { private const val NULL = 0 private const val STRING = 1 - private const val EMPTY_STRING = 2 + private const val STRING_EMPTY = 2 private const val INT_0 = 3 private const val INT_BYTE = 4 private const val INT_SHORT = 5 @@ -313,7 +313,7 @@ internal object RecordSerializer { private const val DOUBLE = 14 private const val JSON_NUMBER = 15 private const val LIST = 16 - private const val EMPTY_LIST = 17 + private const val LIST_EMPTY = 17 private const val MAP = 18 private const val MAP_EMPTY = 19 private const val CACHE_KEY = 20 From ec15cbd7bef7f436a37d3f169404b644fbbf55b1 Mon Sep 17 00:00:00 2001 From: BoD Date: Tue, 25 Mar 2025 10:58:33 +0100 Subject: [PATCH 13/29] Add ApolloStore.trim() --- CHANGELOG.md | 3 ++- .../api/normalized-cache-incubating.api | 2 ++ .../api/normalized-cache-incubating.klib.api | 1 + .../apollographql/cache/normalized/ApolloStore.kt | 12 ++++++++++++ .../cache/normalized/api/NormalizedCache.kt | 4 +--- .../normalized/internal/DefaultApolloStore.kt | 4 ++++ .../internal/OptimisticNormalizedCache.kt | 4 ++++ .../cache/normalized/sql/TrimTest.kt | 15 ++++++++------- 8 files changed, 34 insertions(+), 11 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f6d26924..2a3cb8d0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,7 @@ # Next version (unreleased) -PUT_CHANGELOG_HERE +- Storage binary format is changed to be a bit more compact +- Add ApolloStore.trim() to remove old data from the cache # Version 0.0.7 _2025-03-03_ diff --git a/normalized-cache-incubating/api/normalized-cache-incubating.api b/normalized-cache-incubating/api/normalized-cache-incubating.api index 2b823024..a5939ff1 100644 --- a/normalized-cache-incubating/api/normalized-cache-incubating.api +++ b/normalized-cache-incubating/api/normalized-cache-incubating.api @@ -12,6 +12,7 @@ public abstract interface class com/apollographql/cache/normalized/ApolloStore { public abstract fun remove (Ljava/util/List;Z)I public abstract fun remove-eNSUWrY (Ljava/lang/String;Z)Z public abstract fun rollbackOptimisticUpdates (Ljava/util/UUID;)Ljava/util/Set; + public abstract fun trim (JF)J public abstract fun writeFragment-1qdIjGk (Lcom/apollographql/apollo/api/Fragment;Ljava/lang/String;Lcom/apollographql/apollo/api/Fragment$Data;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Set; public abstract fun writeOperation (Lcom/apollographql/apollo/api/Operation;Lcom/apollographql/apollo/api/Operation$Data;Ljava/util/List;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Set; public abstract fun writeOperation 
(Lcom/apollographql/apollo/api/Operation;Ljava/util/Map;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Set; @@ -29,6 +30,7 @@ public final class com/apollographql/cache/normalized/ApolloStore$DefaultImpls { public static synthetic fun readOperation$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Operation;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Lcom/apollographql/apollo/api/ApolloResponse; public static synthetic fun remove$default (Lcom/apollographql/cache/normalized/ApolloStore;Ljava/util/List;ZILjava/lang/Object;)I public static synthetic fun remove-eNSUWrY$default (Lcom/apollographql/cache/normalized/ApolloStore;Ljava/lang/String;ZILjava/lang/Object;)Z + public static synthetic fun trim$default (Lcom/apollographql/cache/normalized/ApolloStore;JFILjava/lang/Object;)J public static synthetic fun writeFragment-1qdIjGk$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Fragment;Ljava/lang/String;Lcom/apollographql/apollo/api/Fragment$Data;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Ljava/util/Set; public static synthetic fun writeOperation$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Operation;Lcom/apollographql/apollo/api/Operation$Data;Ljava/util/List;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Ljava/util/Set; public static synthetic fun writeOperation$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Operation;Ljava/util/Map;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Ljava/util/Set; diff --git a/normalized-cache-incubating/api/normalized-cache-incubating.klib.api b/normalized-cache-incubating/api/normalized-cache-incubating.klib.api index 11f6118e..fb728d49 100644 --- a/normalized-cache-incubating/api/normalized-cache-incubating.klib.api +++ b/normalized-cache-incubating/api/normalized-cache-incubating.klib.api @@ -95,6 +95,7 @@ abstract interface com.apollographql.cache.normalized/ApolloStore { // com.apoll abstract fun remove(com.apollographql.cache.normalized.api/CacheKey, kotlin/Boolean = ...): kotlin/Boolean // com.apollographql.cache.normalized/ApolloStore.remove|remove(com.apollographql.cache.normalized.api.CacheKey;kotlin.Boolean){}[0] abstract fun remove(kotlin.collections/List, kotlin/Boolean = ...): kotlin/Int // com.apollographql.cache.normalized/ApolloStore.remove|remove(kotlin.collections.List;kotlin.Boolean){}[0] abstract fun rollbackOptimisticUpdates(com.benasher44.uuid/Uuid): kotlin.collections/Set // com.apollographql.cache.normalized/ApolloStore.rollbackOptimisticUpdates|rollbackOptimisticUpdates(com.benasher44.uuid.Uuid){}[0] + abstract fun trim(kotlin/Long, kotlin/Float = ...): kotlin/Long // com.apollographql.cache.normalized/ApolloStore.trim|trim(kotlin.Long;kotlin.Float){}[0] abstract suspend fun publish(kotlin.collections/Set) // com.apollographql.cache.normalized/ApolloStore.publish|publish(kotlin.collections.Set){}[0] final class <#A1: com.apollographql.apollo.api/Executable.Data> ReadResult { // com.apollographql.cache.normalized/ApolloStore.ReadResult|null[0] diff --git 
a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/ApolloStore.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/ApolloStore.kt index 0727e342..abdbfb54 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/ApolloStore.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/ApolloStore.kt @@ -238,6 +238,18 @@ interface ApolloStore { */ fun remove(cacheKeys: List, cascade: Boolean = true): Int + /** + * Trims the store if its size exceeds [maxSizeBytes]. The amount of data to remove is determined by [trimFactor]. + * The oldest records are removed according to their update date. + * + * This may not be supported by all cache implementations (currently this is implemented by the SQL cache). + * + * @param maxSizeBytes the size of the cache in bytes above which the cache should be trimmed. + * @param trimFactor the factor of the cache size to trim. + * @return the cache size in bytes after trimming or -1 if the operation is not supported. + */ + fun trim(maxSizeBytes: Long, trimFactor: Float = 0.1f): Long + /** * Normalizes executable data to a map of [Record] keyed by [Record.key]. */ diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/NormalizedCache.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/NormalizedCache.kt index f99823c6..abc96e6f 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/NormalizedCache.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/NormalizedCache.kt @@ -72,15 +72,13 @@ interface NormalizedCache : ReadOnlyNormalizedCache { /** * Trims the cache if its size exceeds [maxSizeBytes]. The amount of data to remove is determined by [trimFactor]. - * The oldest data is removed according to [ApolloCacheHeaders.RECEIVED_DATE]. + * The oldest records are removed according to their update date. * * Optional operation. * * @param maxSizeBytes the size of the cache in bytes above which the cache should be trimmed. * @param trimFactor the factor of the cache size to trim. * @return the cache size in bytes after trimming or -1 if the operation is not supported. 
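For illustration only, a minimal Kotlin sketch of how the trim() API added above might be called, based on the KDoc and the TrimTest changes in this commit; it is not part of the patch. The 10 MiB threshold is an arbitrary example value, and the import path for SqlNormalizedCacheFactory is assumed.

import com.apollographql.cache.normalized.ApolloStore
import com.apollographql.cache.normalized.sql.SqlNormalizedCacheFactory

fun main() {
  // Sketch: a store backed by the SQL cache, which is currently the only implementation that supports trimming.
  val store = ApolloStore(SqlNormalizedCacheFactory())
  // Trim once the cache grows past ~10 MiB (example value), removing the oldest ~10% of records by update date.
  val sizeAfterTrim = store.trim(maxSizeBytes = 10L * 1024 * 1024, trimFactor = 0.1f)
  if (sizeAfterTrim == -1L) {
    // -1 means the backing cache does not support trimming (see the KDoc above).
  }
  println("Cache size after trim: $sizeAfterTrim bytes")
}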
- * - * @see com.apollographql.cache.normalized.storeReceiveDate */ fun trim(maxSizeBytes: Long, trimFactor: Float = 0.1f): Long { return -1 diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/DefaultApolloStore.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/DefaultApolloStore.kt index d438689a..bdc5c062 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/DefaultApolloStore.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/DefaultApolloStore.kt @@ -303,6 +303,10 @@ internal class DefaultApolloStore( return cache.merge(record, cacheHeaders, recordMerger) } + override fun trim(maxSizeBytes: Long, trimFactor: Float): Long { + return cache.trim(maxSizeBytes, trimFactor) + } + override fun dump(): Map, Map> { return cache.dump() } diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/OptimisticNormalizedCache.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/OptimisticNormalizedCache.kt index 628f65cf..880e2dd8 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/OptimisticNormalizedCache.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/OptimisticNormalizedCache.kt @@ -46,6 +46,10 @@ internal class OptimisticNormalizedCache(private val wrapped: NormalizedCache) : return wrapped.remove(cacheKeys, cascade) + internalRemove(cacheKeys, cascade) } + override fun trim(maxSizeBytes: Long, trimFactor: Float): Long { + return wrapped.trim(maxSizeBytes, trimFactor) + } + private fun internalRemove(cacheKeys: Collection, cascade: Boolean): Int { var total = 0 val referencedCacheKeys = mutableSetOf() diff --git a/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/TrimTest.kt b/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/TrimTest.kt index 1d4578a8..af7b2604 100644 --- a/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/TrimTest.kt +++ b/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/TrimTest.kt @@ -1,5 +1,6 @@ package com.apollographql.cache.normalized.sql +import com.apollographql.cache.normalized.ApolloStore import com.apollographql.cache.normalized.api.CacheHeaders import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.DefaultRecordMerger @@ -12,7 +13,7 @@ import kotlin.test.assertNull class TrimTest { @Test fun trimTest() { - val cache = SqlNormalizedCacheFactory().create().also { it.clearAll() } + val apolloStore = ApolloStore(SqlNormalizedCacheFactory()).also { it.clearAll() } val largeString = "".padStart(1024, '?') @@ -21,8 +22,8 @@ class TrimTest { fields = mapOf("key" to "value"), mutationId = null, metadata = emptyMap() - ).withDates(receivedDate = "0", expirationDate = null) - cache.merge(oldRecord, CacheHeaders.NONE, recordMerger = DefaultRecordMerger) + ) + apolloStore.accessCache { it.merge(oldRecord, CacheHeaders.NONE, recordMerger = DefaultRecordMerger) } val newRecords = 0.until(2 * 1024).map { Record( @@ -32,16 +33,16 @@ class TrimTest { metadata = emptyMap() ).withDates(receivedDate = it.toString(), expirationDate = null) } - 
cache.merge(newRecords, CacheHeaders.NONE, recordMerger = DefaultRecordMerger) + apolloStore.accessCache { it.merge(newRecords, CacheHeaders.NONE, recordMerger = DefaultRecordMerger) } - val sizeBeforeTrim = cache.trim(-1) + val sizeBeforeTrim = apolloStore.trim(-1) assertEquals(8515584, sizeBeforeTrim) // Trim the cache by 10% - val sizeAfterTrim = cache.trim(8515584, 0.1f) + val sizeAfterTrim = apolloStore.trim(8515584, 0.1f) assertEquals(7667712, sizeAfterTrim) // The oldest key must have been removed - assertNull(cache.loadRecord(CacheKey("old"), CacheHeaders.NONE)) + assertNull(apolloStore.accessCache { it.loadRecord(CacheKey("old"), CacheHeaders.NONE) }) } } From 001ec2a96daa2ac386705efc9881e8b979d86c35 Mon Sep 17 00:00:00 2001 From: BoD Date: Tue, 25 Mar 2025 10:58:59 +0100 Subject: [PATCH 14/29] Increase SQLite's memory cache to 8 MiB --- .../cache/normalized/sql/internal/factoryHelpers.kt | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/factoryHelpers.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/factoryHelpers.kt index 0d7660fc..8892a1be 100644 --- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/factoryHelpers.kt +++ b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/factoryHelpers.kt @@ -35,6 +35,14 @@ internal fun createRecordDatabase(driver: SqlDriver): RecordDatabase { "Apollo: Cannot find the '$expectedTableName' table (found '$tableNames' instead)" } + try { + // Increase the memory cache to 8 MiB + // https://www.sqlite.org/pragma.html#pragma_cache_size + driver.executeQuery(null, "PRAGMA cache_size = -8192;", { QueryResult.Unit }, 0) + } catch (_: Exception) { + // Not supported on all platforms, ignore + } + return RecordDatabase(driver) } From cf891bea76000f3c87f83d64f42d07910ddd7f0a Mon Sep 17 00:00:00 2001 From: BoD Date: Tue, 25 Mar 2025 14:53:05 +0100 Subject: [PATCH 15/29] Report #107 --- .../cache/normalized/api/DataWithErrors.kt | 1 - .../cache/normalized/internal/Normalizer.kt | 7 +++- .../pagination/connection/schema.graphqls | 2 +- .../kotlin/ConnectionPaginationTest.kt | 39 ++++++++++++++++++- 4 files changed, 44 insertions(+), 5 deletions(-) diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/DataWithErrors.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/DataWithErrors.kt index 0c76c196..6881caca 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/DataWithErrors.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/DataWithErrors.kt @@ -187,7 +187,6 @@ private fun CompiledSelection.fieldSelection(responseName: String): CompiledFiel } } - @Suppress("UNCHECKED_CAST") internal fun Any?.hasErrors(): Boolean { val queue = ArrayDeque() diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/Normalizer.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/Normalizer.kt index e15cdffc..8d5bca6e 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/Normalizer.kt +++ 
b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/Normalizer.kt @@ -114,7 +114,11 @@ internal class Normalizer( path = base?.append(fieldKey) ?: CacheKey(fieldKey), embeddedFields = embeddedFieldsProvider.getEmbeddedFields(EmbeddedFieldsContext(parentType)), ) - val metadata = metadataGenerator.metadataForObject(entry.value, MetadataGeneratorContext(field = mergedField, variables)) + val metadata = if (entry.value is Error) { + emptyMap() + } else { + metadataGenerator.metadataForObject(entry.value, MetadataGeneratorContext(field = mergedField, variables)) + } fieldKey to FieldInfo(value, metadata) }.toMap() @@ -158,7 +162,6 @@ internal class Normalizer( return cacheKey } - /** * Replace all objects in [value] with [CacheKey] and if [value] is an object itself, returns it as a [CacheKey] * diff --git a/tests/pagination/src/commonMain/graphql/pagination/connection/schema.graphqls b/tests/pagination/src/commonMain/graphql/pagination/connection/schema.graphqls index e66078a5..f825cd95 100644 --- a/tests/pagination/src/commonMain/graphql/pagination/connection/schema.graphqls +++ b/tests/pagination/src/commonMain/graphql/pagination/connection/schema.graphqls @@ -1,5 +1,5 @@ type Query { - users(first: Int = 10, after: String = null, last: Int = null, before: String = null): UserConnection! + users(first: Int = 10, after: String = null, last: Int = null, before: String = null): UserConnection } type UserConnection { diff --git a/tests/pagination/src/commonTest/kotlin/ConnectionPaginationTest.kt b/tests/pagination/src/commonTest/kotlin/ConnectionPaginationTest.kt index 1c43da4e..89740b4f 100644 --- a/tests/pagination/src/commonTest/kotlin/ConnectionPaginationTest.kt +++ b/tests/pagination/src/commonTest/kotlin/ConnectionPaginationTest.kt @@ -1,5 +1,6 @@ package pagination +import com.apollographql.apollo.api.Error import com.apollographql.apollo.api.Optional import com.apollographql.apollo.testing.internal.runTest import com.apollographql.cache.normalized.ApolloStore @@ -338,5 +339,41 @@ class ConnectionPaginationTest { dataFromStore = apolloStore.readOperation(query1).data assertEquals(data5, dataFromStore) } -} + @Test + fun errorMemoryCache() { + errorTest(MemoryCacheFactory()) + } + + @Test + fun errorSqlCache() { + errorTest(SqlNormalizedCacheFactory()) + } + + @Test + fun errorChainedCache() { + errorTest(MemoryCacheFactory().chain(SqlNormalizedCacheFactory())) + } + + private fun errorTest(cacheFactory: NormalizedCacheFactory) = runTest { + val apolloStore = ApolloStore( + normalizedCacheFactory = cacheFactory, + cacheKeyGenerator = TypePolicyCacheKeyGenerator, + metadataGenerator = ConnectionMetadataGenerator(Pagination.connectionTypes), + cacheResolver = FieldPolicyCacheResolver, + recordMerger = ConnectionRecordMerger + ) + apolloStore.clearAll() + val query = UsersQuery(first = Optional.Present(2)) + apolloStore.writeOperation( + operation = query, + data = UsersQuery.Data { users = null }, + errors = listOf(Error.Builder("An error occurred.").path(listOf("users")).build()) + ) + val responseFromStore = apolloStore.readOperation(query) + assertEquals(UsersQuery.Data { users = null }, responseFromStore.data) + assertEquals(1, responseFromStore.errors?.size) + assertEquals("An error occurred.", responseFromStore.errors?.firstOrNull()?.message) + assertEquals(listOf("users"), responseFromStore.errors?.firstOrNull()?.path) + } +} From d4d8b9e1c43f2b4bebd47de8e821286e5b340c18 Mon Sep 17 00:00:00 2001 From: BoD Date: Tue, 25 Mar 2025 14:53:38 
+0100 Subject: [PATCH 16/29] Revert removal of assertChainedCachesAreEqual --- .../kotlin/ConnectionPaginationTest.kt | 6 +++++ .../ConnectionProgrammaticPaginationTest.kt | 6 +++++ .../ConnectionWithNodesPaginationTest.kt | 6 +++++ .../kotlin/CursorBasedPaginationTest.kt | 6 +++++ .../OffsetBasedWithArrayPaginationTest.kt | 6 +++++ ...fsetBasedWithPageAndInputPaginationTest.kt | 6 +++++ .../OffsetBasedWithPagePaginationTest.kt | 26 +++++++++++++++++++ 7 files changed, 62 insertions(+) diff --git a/tests/pagination/src/commonTest/kotlin/ConnectionPaginationTest.kt b/tests/pagination/src/commonTest/kotlin/ConnectionPaginationTest.kt index 89740b4f..c11d70aa 100644 --- a/tests/pagination/src/commonTest/kotlin/ConnectionPaginationTest.kt +++ b/tests/pagination/src/commonTest/kotlin/ConnectionPaginationTest.kt @@ -73,6 +73,7 @@ class ConnectionPaginationTest { apolloStore.writeOperation(query1, data1) var dataFromStore = apolloStore.readOperation(query1).data assertEquals(data1, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Page after val query2 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx43")) @@ -135,6 +136,7 @@ class ConnectionPaginationTest { } } assertEquals(expectedData, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Page after val query3 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx45")) @@ -209,6 +211,7 @@ class ConnectionPaginationTest { } } assertEquals(expectedData, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Page before val query4 = UsersQuery(last = Optional.Present(2), before = Optional.Present("xx42")) @@ -295,6 +298,7 @@ class ConnectionPaginationTest { } } assertEquals(expectedData, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Non-contiguous page (should reset) val query5 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx50")) @@ -323,6 +327,7 @@ class ConnectionPaginationTest { apolloStore.writeOperation(query5, data5) dataFromStore = apolloStore.readOperation(query1).data assertEquals(data5, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Empty page (should keep previous result) val query6 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx51")) @@ -338,6 +343,7 @@ class ConnectionPaginationTest { apolloStore.writeOperation(query6, data6) dataFromStore = apolloStore.readOperation(query1).data assertEquals(data5, dataFromStore) + assertChainedCachesAreEqual(apolloStore) } @Test diff --git a/tests/pagination/src/commonTest/kotlin/ConnectionProgrammaticPaginationTest.kt b/tests/pagination/src/commonTest/kotlin/ConnectionProgrammaticPaginationTest.kt index debdf44b..f20ea377 100644 --- a/tests/pagination/src/commonTest/kotlin/ConnectionProgrammaticPaginationTest.kt +++ b/tests/pagination/src/commonTest/kotlin/ConnectionProgrammaticPaginationTest.kt @@ -82,6 +82,7 @@ class ConnectionProgrammaticPaginationTest { apolloStore.writeOperation(query1, data1) var dataFromStore = apolloStore.readOperation(query1).data assertEquals(data1, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Page after val query2 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx43")) @@ -144,6 +145,7 @@ class ConnectionProgrammaticPaginationTest { } } assertEquals(expectedData, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Page after val query3 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx45")) @@ -218,6 +220,7 @@ class ConnectionProgrammaticPaginationTest { } } 
assertEquals(expectedData, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Page before val query4 = UsersQuery(last = Optional.Present(2), before = Optional.Present("xx42")) @@ -304,6 +307,7 @@ class ConnectionProgrammaticPaginationTest { } } assertEquals(expectedData, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Non-contiguous page (should reset) val query5 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx50")) @@ -332,6 +336,7 @@ class ConnectionProgrammaticPaginationTest { apolloStore.writeOperation(query5, data5) dataFromStore = apolloStore.readOperation(query1).data assertEquals(data5, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Empty page (should keep previous result) val query6 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx51")) @@ -347,6 +352,7 @@ class ConnectionProgrammaticPaginationTest { apolloStore.writeOperation(query6, data6) dataFromStore = apolloStore.readOperation(query1).data assertEquals(data5, dataFromStore) + assertChainedCachesAreEqual(apolloStore) } } diff --git a/tests/pagination/src/commonTest/kotlin/ConnectionWithNodesPaginationTest.kt b/tests/pagination/src/commonTest/kotlin/ConnectionWithNodesPaginationTest.kt index 6555b282..50130b82 100644 --- a/tests/pagination/src/commonTest/kotlin/ConnectionWithNodesPaginationTest.kt +++ b/tests/pagination/src/commonTest/kotlin/ConnectionWithNodesPaginationTest.kt @@ -65,6 +65,7 @@ class ConnectionWithNodesPaginationTest { apolloStore.writeOperation(query1, data1) var dataFromStore = apolloStore.readOperation(query1).data assertEquals(data1, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Page after val query2 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx43")) @@ -109,6 +110,7 @@ class ConnectionWithNodesPaginationTest { } } assertEquals(expectedData, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Page after val query3 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx45")) @@ -159,6 +161,7 @@ class ConnectionWithNodesPaginationTest { } } assertEquals(expectedData, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Page before val query4 = UsersQuery(last = Optional.Present(2), before = Optional.Present("xx42")) @@ -215,6 +218,7 @@ class ConnectionWithNodesPaginationTest { } } assertEquals(expectedData, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Non-contiguous page (should reset) val query5 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx50")) @@ -237,6 +241,7 @@ class ConnectionWithNodesPaginationTest { apolloStore.writeOperation(query5, data5) dataFromStore = apolloStore.readOperation(query1).data assertEquals(data5, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Empty page (should keep previous result) val query6 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx51")) @@ -253,6 +258,7 @@ class ConnectionWithNodesPaginationTest { apolloStore.writeOperation(query6, data6) dataFromStore = apolloStore.readOperation(query1).data assertEquals(data5, dataFromStore) + assertChainedCachesAreEqual(apolloStore) } } diff --git a/tests/pagination/src/commonTest/kotlin/CursorBasedPaginationTest.kt b/tests/pagination/src/commonTest/kotlin/CursorBasedPaginationTest.kt index 1ced1369..1449e92a 100644 --- a/tests/pagination/src/commonTest/kotlin/CursorBasedPaginationTest.kt +++ b/tests/pagination/src/commonTest/kotlin/CursorBasedPaginationTest.kt @@ -79,6 +79,7 @@ class CursorBasedPaginationTest 
{ apolloStore.writeOperation(query1, data1) var dataFromStore = apolloStore.readOperation(query1).data assertEquals(data1, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Page after val query2 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx43")) @@ -141,6 +142,7 @@ class CursorBasedPaginationTest { } } assertEquals(expectedData, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Page after val query3 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx45")) @@ -215,6 +217,7 @@ class CursorBasedPaginationTest { } } assertEquals(expectedData, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Page before val query4 = UsersQuery(last = Optional.Present(2), before = Optional.Present("xx42")) @@ -301,6 +304,7 @@ class CursorBasedPaginationTest { } } assertEquals(expectedData, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Non-contiguous page (should reset) val query5 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx50")) @@ -329,6 +333,7 @@ class CursorBasedPaginationTest { apolloStore.writeOperation(query5, data5) dataFromStore = apolloStore.readOperation(query1).data assertEquals(data5, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Empty page (should keep previous result) val query6 = UsersQuery(first = Optional.Present(2), after = Optional.Present("xx51")) @@ -344,6 +349,7 @@ class CursorBasedPaginationTest { apolloStore.writeOperation(query6, data6) dataFromStore = apolloStore.readOperation(query1).data assertEquals(data5, dataFromStore) + assertChainedCachesAreEqual(apolloStore) } @Test diff --git a/tests/pagination/src/commonTest/kotlin/OffsetBasedWithArrayPaginationTest.kt b/tests/pagination/src/commonTest/kotlin/OffsetBasedWithArrayPaginationTest.kt index 1be429b5..69925d93 100644 --- a/tests/pagination/src/commonTest/kotlin/OffsetBasedWithArrayPaginationTest.kt +++ b/tests/pagination/src/commonTest/kotlin/OffsetBasedWithArrayPaginationTest.kt @@ -56,6 +56,7 @@ class OffsetBasedWithArrayPaginationTest { apolloStore.writeOperation(query1, data1) var dataFromStore = apolloStore.readOperation(query1).data assertEquals(data1, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Page after val query2 = UsersQuery(offset = Optional.Present(44), limit = Optional.Present(2)) @@ -76,6 +77,7 @@ class OffsetBasedWithArrayPaginationTest { ) } assertEquals(expectedData, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Page in the middle val query3 = UsersQuery(offset = Optional.Present(44), limit = Optional.Present(3)) @@ -98,6 +100,7 @@ class OffsetBasedWithArrayPaginationTest { ) } assertEquals(expectedData, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Page before val query4 = UsersQuery(offset = Optional.Present(40), limit = Optional.Present(2)) @@ -121,6 +124,7 @@ class OffsetBasedWithArrayPaginationTest { ) } assertEquals(expectedData, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Non-contiguous page (should reset) val query5 = UsersQuery(offset = Optional.Present(50), limit = Optional.Present(2)) @@ -133,6 +137,7 @@ class OffsetBasedWithArrayPaginationTest { apolloStore.writeOperation(query5, data5) dataFromStore = apolloStore.readOperation(query1).data assertEquals(data5, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Empty page (should keep previous result) val query6 = UsersQuery(offset = Optional.Present(52), limit = Optional.Present(2)) @@ -142,6 +147,7 @@ class OffsetBasedWithArrayPaginationTest { 
apolloStore.writeOperation(query6, data6) dataFromStore = apolloStore.readOperation(query1).data assertEquals(data5, dataFromStore) + assertChainedCachesAreEqual(apolloStore) } private class OffsetPaginationMetadataGenerator(private val fieldName: String) : MetadataGenerator { diff --git a/tests/pagination/src/commonTest/kotlin/OffsetBasedWithPageAndInputPaginationTest.kt b/tests/pagination/src/commonTest/kotlin/OffsetBasedWithPageAndInputPaginationTest.kt index 907f2584..07b0bd70 100644 --- a/tests/pagination/src/commonTest/kotlin/OffsetBasedWithPageAndInputPaginationTest.kt +++ b/tests/pagination/src/commonTest/kotlin/OffsetBasedWithPageAndInputPaginationTest.kt @@ -66,6 +66,7 @@ class OffsetBasedWithPageAndInputPaginationTest { apolloStore.writeOperation(query1, data1) var dataFromStore = apolloStore.readOperation(query1).data assertEquals(data1, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Page after val query2 = UsersQuery(offset = Optional.Present(44), limit = Optional.Present(2)) @@ -90,6 +91,7 @@ class OffsetBasedWithPageAndInputPaginationTest { } } assertEquals(expectedData, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Page in the middle val query3 = UsersQuery(offset = Optional.Present(44), limit = Optional.Present(3)) @@ -116,6 +118,7 @@ class OffsetBasedWithPageAndInputPaginationTest { } } assertEquals(expectedData, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Page before val query4 = UsersQuery(offset = Optional.Present(40), limit = Optional.Present(2)) @@ -143,6 +146,7 @@ class OffsetBasedWithPageAndInputPaginationTest { } } assertEquals(expectedData, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Non-contiguous page (should reset) val query5 = UsersQuery(offset = Optional.Present(50), limit = Optional.Present(2)) @@ -157,6 +161,7 @@ class OffsetBasedWithPageAndInputPaginationTest { apolloStore.writeOperation(query5, data5) dataFromStore = apolloStore.readOperation(query1).data assertEquals(data5, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Empty page (should keep previous result) val query6 = UsersQuery(offset = Optional.Present(52), limit = Optional.Present(2)) @@ -168,6 +173,7 @@ class OffsetBasedWithPageAndInputPaginationTest { apolloStore.writeOperation(query6, data6) dataFromStore = apolloStore.readOperation(query1).data assertEquals(data5, dataFromStore) + assertChainedCachesAreEqual(apolloStore) } private class OffsetPaginationMetadataGenerator(private val typeName: String) : MetadataGenerator { diff --git a/tests/pagination/src/commonTest/kotlin/OffsetBasedWithPagePaginationTest.kt b/tests/pagination/src/commonTest/kotlin/OffsetBasedWithPagePaginationTest.kt index 8b0100b5..bcc3724e 100644 --- a/tests/pagination/src/commonTest/kotlin/OffsetBasedWithPagePaginationTest.kt +++ b/tests/pagination/src/commonTest/kotlin/OffsetBasedWithPagePaginationTest.kt @@ -4,11 +4,13 @@ import com.apollographql.apollo.api.Optional import com.apollographql.apollo.api.json.ApolloJsonElement import com.apollographql.apollo.testing.internal.runTest import com.apollographql.cache.normalized.ApolloStore +import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.FieldPolicyCacheResolver import com.apollographql.cache.normalized.api.FieldRecordMerger import com.apollographql.cache.normalized.api.MetadataGenerator import com.apollographql.cache.normalized.api.MetadataGeneratorContext import com.apollographql.cache.normalized.api.NormalizedCacheFactory +import 
com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.api.TypePolicyCacheKeyGenerator import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.sql.SqlNormalizedCacheFactory @@ -59,6 +61,7 @@ class OffsetBasedWithPagePaginationTest { apolloStore.writeOperation(query1, data1) var dataFromStore = apolloStore.readOperation(query1).data assertEquals(data1, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Page after val query2 = UsersQuery(offset = Optional.Present(44), limit = Optional.Present(2)) @@ -83,6 +86,7 @@ class OffsetBasedWithPagePaginationTest { } } assertEquals(expectedData, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Page in the middle val query3 = UsersQuery(offset = Optional.Present(44), limit = Optional.Present(3)) @@ -109,6 +113,7 @@ class OffsetBasedWithPagePaginationTest { } } assertEquals(expectedData, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Page before val query4 = UsersQuery(offset = Optional.Present(40), limit = Optional.Present(2)) @@ -136,6 +141,7 @@ class OffsetBasedWithPagePaginationTest { } } assertEquals(expectedData, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Non-contiguous page (should reset) val query5 = UsersQuery(offset = Optional.Present(50), limit = Optional.Present(2)) @@ -150,6 +156,7 @@ class OffsetBasedWithPagePaginationTest { apolloStore.writeOperation(query5, data5) dataFromStore = apolloStore.readOperation(query1).data assertEquals(data5, dataFromStore) + assertChainedCachesAreEqual(apolloStore) // Empty page (should keep previous result) val query6 = UsersQuery(offset = Optional.Present(52), limit = Optional.Present(2)) @@ -161,6 +168,7 @@ class OffsetBasedWithPagePaginationTest { apolloStore.writeOperation(query6, data6) dataFromStore = apolloStore.readOperation(query1).data assertEquals(data5, dataFromStore) + assertChainedCachesAreEqual(apolloStore) } private class OffsetPaginationMetadataGenerator(private val typeName: String) : MetadataGenerator { @@ -218,3 +226,21 @@ class OffsetBasedWithPagePaginationTest { } } } + +internal fun assertChainedCachesAreEqual(apolloStore: ApolloStore) { + val dump = apolloStore.dump().filterKeys { + // Ignore optimistic cache for comparison + it.simpleName != "OptimisticNormalizedCache" + } + if (dump.size < 2) return + val caches = dump.values.toList() + val cache1: Map = caches[0] + val cache2: Map = caches[1] + for (key in cache1.keys) { + val record1 = cache1[key]!! + val record2 = cache2[key]!! 
+ assertEquals(record1.key.key, record2.key.key) + assertEquals(record1.fields, record2.fields) + assertEquals(record1.metadata, record2.metadata) + } +} From e261f1d0218b31e82ff9174da079db2c9b59501a Mon Sep 17 00:00:00 2001 From: BoD Date: Tue, 25 Mar 2025 14:57:19 +0100 Subject: [PATCH 17/29] Fix cacheDumpProvider to include errors --- .../kotlin/com/apollographql/cache/normalized/ApolloStore.kt | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/ApolloStore.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/ApolloStore.kt index abdbfb54..72d11670 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/ApolloStore.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/ApolloStore.kt @@ -26,6 +26,7 @@ import com.apollographql.cache.normalized.api.NormalizedCache import com.apollographql.cache.normalized.api.NormalizedCacheFactory import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.api.RecordMerger +import com.apollographql.cache.normalized.api.RecordValue import com.apollographql.cache.normalized.api.TypePolicyCacheKeyGenerator import com.apollographql.cache.normalized.internal.DefaultApolloStore import com.benasher44.uuid.Uuid @@ -335,7 +336,7 @@ internal fun ApolloStore.cacheDumpProvider(): () -> Map null is String -> this @@ -345,6 +346,7 @@ private fun Any?.toExternal(): Any? { is Double -> this is JsonNumber -> this is CacheKey -> this.serialize() + is Error -> this is List<*> -> { map { it.toExternal() } } From 646ee02fb808b03ee3eaac0131bc82753d271e65 Mon Sep 17 00:00:00 2001 From: BoD Date: Tue, 25 Mar 2025 14:57:44 +0100 Subject: [PATCH 18/29] Minor tweak/rename --- .../apollographql/cache/normalized/memory/MemoryCache.kt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/memory/MemoryCache.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/memory/MemoryCache.kt index 7bc43088..41fdaed4 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/memory/MemoryCache.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/memory/MemoryCache.kt @@ -123,12 +123,12 @@ class MemoryCache( private fun internalMerge(record: Record, cacheHeaders: CacheHeaders, recordMerger: RecordMerger): Set { val receivedDate = cacheHeaders.headerValue(ApolloCacheHeaders.RECEIVED_DATE) val expirationDate = cacheHeaders.headerValue(ApolloCacheHeaders.EXPIRATION_DATE) - val oldRecord = loadRecord(record.key, cacheHeaders) - val changedKeys = if (oldRecord == null) { + val existingRecord = loadRecord(record.key, cacheHeaders) + val changedKeys = if (existingRecord == null) { lruCache[record.key] = record.withDates(receivedDate = receivedDate, expirationDate = expirationDate) record.fieldKeys() } else { - val (mergedRecord, changedKeys) = recordMerger.merge(RecordMergerContext(existing = oldRecord, incoming = record, cacheHeaders = cacheHeaders)) + val (mergedRecord, changedKeys) = recordMerger.merge(RecordMergerContext(existing = existingRecord, incoming = record, cacheHeaders = cacheHeaders)) lruCache[record.key] = mergedRecord.withDates(receivedDate = receivedDate, expirationDate = expirationDate) changedKeys } From 
58657842499938767f009dad926c47ed577e3cf7 Mon Sep 17 00:00:00 2001 From: BoD Date: Tue, 25 Mar 2025 14:58:02 +0100 Subject: [PATCH 19/29] Remove debug --- tests/migration/src/commonTest/kotlin/MigrationTest.kt | 4 ---- 1 file changed, 4 deletions(-) diff --git a/tests/migration/src/commonTest/kotlin/MigrationTest.kt b/tests/migration/src/commonTest/kotlin/MigrationTest.kt index c6bbf8e8..521b3c6a 100644 --- a/tests/migration/src/commonTest/kotlin/MigrationTest.kt +++ b/tests/migration/src/commonTest/kotlin/MigrationTest.kt @@ -9,7 +9,6 @@ import com.apollographql.cache.normalized.FetchPolicy import com.apollographql.cache.normalized.api.CacheHeaders import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.DefaultRecordMerger -import com.apollographql.cache.normalized.api.NormalizedCache import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.api.RecordValue import com.apollographql.cache.normalized.fetchPolicy @@ -141,9 +140,6 @@ class MigrationTest { val store = ApolloStore(SqlNormalizedCacheFactory(name = "modern.db")).also { it.clearAll() } store.migrateFrom(legacyStore) - println(NormalizedCache.prettifyDump(store.dump())) - - // Read the data back ApolloClient.Builder() .serverUrl(mockServer.url()) From 7b3b11ad3b0ea71cf9dd59abda5948f65a8fde57 Mon Sep 17 00:00:00 2001 From: BoD Date: Tue, 25 Mar 2025 15:01:24 +0100 Subject: [PATCH 20/29] Revert removed test --- .../src/commonTest/kotlin/StoreTest.kt | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/tests/normalized-cache/src/commonTest/kotlin/StoreTest.kt b/tests/normalized-cache/src/commonTest/kotlin/StoreTest.kt index c574e33c..3c36c9bd 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/StoreTest.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/StoreTest.kt @@ -105,6 +105,18 @@ class StoreTest { assertFriendIsNotCached("1003") } + @Test + @Throws(Exception::class) + fun directAccess() = runTest(before = { setUp() }) { + // put everything in the cache + storeAllFriends() + + store.accessCache { + it.remove(CacheKey("Character:1000"), false) + } + assertFriendIsNotCached("1000") + } + @Test fun testNewBuilderNewStore() = runTest(before = { setUp() }) { storeAllFriends() From 7d45d849165e792b18171e9c0a84608a2edbc719 Mon Sep 17 00:00:00 2001 From: BoD Date: Tue, 25 Mar 2025 15:01:41 +0100 Subject: [PATCH 21/29] Revert tweaked values --- .../com/apollographql/cache/normalized/MemoryCacheTest.kt | 2 +- .../com/apollographql/cache/normalized/RecordWeigherTest.kt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/MemoryCacheTest.kt b/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/MemoryCacheTest.kt index 2677164e..b2fbb5b1 100644 --- a/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/MemoryCacheTest.kt +++ b/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/MemoryCacheTest.kt @@ -85,7 +85,7 @@ class MemoryCacheTest { val testRecord3 = createTestRecord("3") val lruCache = createCache( - maxSizeBytes = 2000 + maxSizeBytes = 800 ) val records = listOf(testRecord1, testRecord2, testRecord3) diff --git a/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/RecordWeigherTest.kt b/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/RecordWeigherTest.kt index 
e327ac8c..51cac0df 100644 --- a/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/RecordWeigherTest.kt +++ b/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/RecordWeigherTest.kt @@ -32,7 +32,7 @@ class RecordWeigherTest { ) ) - assertTrue(record.sizeInBytes <= 377) + assertTrue(record.sizeInBytes <= 284) assertTrue(record.sizeInBytes >= 258) // JS takes less space, maybe for strings? } } From f0232f184309979ae63f47dfd50d7cd12b00dbcd Mon Sep 17 00:00:00 2001 From: BoD Date: Tue, 25 Mar 2025 15:12:12 +0100 Subject: [PATCH 22/29] Update CHANGELOG.md --- CHANGELOG.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2a3cb8d0..4a2ea130 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,7 +1,10 @@ # Next version (unreleased) - Storage binary format is changed to be a bit more compact -- Add ApolloStore.trim() to remove old data from the cache +- Add `ApolloStore.trim()` to remove old data from the cache +- `CacheKey` is used in more APIs instead of `String`, for consistency. +- `ApolloCacheHeaders.EVICT_AFTER_READ` is removed. `ApolloStore.remove()` can be used instead. +- `NormalizedCache.remove(pattern: String)` is removed. Please open an issue if you need this feature back. # Version 0.0.7 _2025-03-03_ From 073c4d13a54630ac3c335387cc7eb9ac4f8c06aa Mon Sep 17 00:00:00 2001 From: BoD Date: Tue, 25 Mar 2025 20:03:55 +0100 Subject: [PATCH 23/29] Encode certain known metadata keys as single byte strings to save space --- .../normalized/sql/internal/RecordSerializer.kt | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordSerializer.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordSerializer.kt index 47c7e731..6a80943c 100644 --- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordSerializer.kt +++ b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordSerializer.kt @@ -4,6 +4,7 @@ import com.apollographql.apollo.api.Error import com.apollographql.apollo.api.Error.Builder import com.apollographql.apollo.api.json.ApolloJsonElement import com.apollographql.apollo.api.json.JsonNumber +import com.apollographql.cache.normalized.api.ApolloCacheHeaders import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.api.RecordValue @@ -18,7 +19,7 @@ internal object RecordSerializer { val buffer = Buffer() buffer.writeMap(record.fields) buffer._writeInt(record.metadata.size) - for ((k, v) in record.metadata) { + for ((k, v) in record.metadata.mapKeys { (k, _) -> knownMetadataKeys[k] ?: k }) { buffer.writeString(k) buffer.writeMap(v) } @@ -35,7 +36,7 @@ internal object RecordSerializer { val v = buffer.readMap() put(k, v) } - } + }.mapKeys { (k, _) -> knownMetadataKeysInverted[k] ?: k } return Record( key = CacheKey(key), fields = fields, @@ -318,4 +319,11 @@ internal object RecordSerializer { private const val MAP_EMPTY = 19 private const val CACHE_KEY = 20 private const val ERROR = 21 + + // Encode certain known metadata keys as single byte strings to save space + private val knownMetadataKeys = mapOf( + ApolloCacheHeaders.RECEIVED_DATE to "0", + ApolloCacheHeaders.EXPIRATION_DATE
to "1", + ) + private val knownMetadataKeysInverted = knownMetadataKeys.entries.associate { (k, v) -> v to k } } From 21ecb0391b2d8d2041b8013c689da4847a5c0577 Mon Sep 17 00:00:00 2001 From: BoD Date: Wed, 26 Mar 2025 17:30:12 +0100 Subject: [PATCH 24/29] Make CacheKey extensions internal, and add test-utils --- .../api/normalized-cache-incubating.api | 2 -- .../api/normalized-cache-incubating.klib.api | 2 -- .../cache/normalized/api/CacheKey.kt | 7 ++----- .../build.gradle.kts | 1 + .../normalized/sql/SqlNormalizedCacheTest.kt | 2 +- settings.gradle.kts | 1 + test-utils/README.md | 3 +++ test-utils/api/test-utils.api | 5 +++++ test-utils/api/test-utils.klib.api | 10 +++++++++ test-utils/build.gradle.kts | 21 +++++++++++++++++++ .../cache/normalized/testing/fieldKey.kt | 15 +++++++++++++ tests/defer/build.gradle.kts | 1 + .../kotlin/test/DeferNormalizedCacheTest.kt | 2 +- tests/garbage-collection/build.gradle.kts | 1 + .../kotlin/DanglingReferencesTest.kt | 4 ++-- .../commonTest/kotlin/GarbageCollectTest.kt | 4 ++-- .../src/commonTest/kotlin/StaleFieldsTest.kt | 4 ++-- tests/normalized-cache/build.gradle.kts | 1 + .../src/commonTest/kotlin/FetchPolicyTest.kt | 2 +- .../src/commonTest/kotlin/NormalizerTest.kt | 2 +- .../FragmentNormalizerTest.kt | 2 +- tests/partial-results/build.gradle.kts | 1 + .../kotlin/test/CachePartialResultTest.kt | 2 +- 23 files changed, 74 insertions(+), 21 deletions(-) create mode 100644 test-utils/README.md create mode 100644 test-utils/api/test-utils.api create mode 100644 test-utils/api/test-utils.klib.api create mode 100644 test-utils/build.gradle.kts create mode 100644 test-utils/src/commonMain/kotlin/com/apollographql/cache/normalized/testing/fieldKey.kt diff --git a/normalized-cache-incubating/api/normalized-cache-incubating.api b/normalized-cache-incubating/api/normalized-cache-incubating.api index a5939ff1..5854cfbd 100644 --- a/normalized-cache-incubating/api/normalized-cache-incubating.api +++ b/normalized-cache-incubating/api/normalized-cache-incubating.api @@ -266,8 +266,6 @@ public final class com/apollographql/cache/normalized/api/CacheKeyGeneratorConte } public final class com/apollographql/cache/normalized/api/CacheKeyKt { - public static final fun append-eNSUWrY (Ljava/lang/String;[Ljava/lang/String;)Ljava/lang/String; - public static final fun fieldKey-eNSUWrY (Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String; public static final fun isRootKey-pWl1Des (Ljava/lang/String;)Z } diff --git a/normalized-cache-incubating/api/normalized-cache-incubating.klib.api b/normalized-cache-incubating/api/normalized-cache-incubating.klib.api index fb728d49..b9cfca4f 100644 --- a/normalized-cache-incubating/api/normalized-cache-incubating.klib.api +++ b/normalized-cache-incubating/api/normalized-cache-incubating.klib.api @@ -560,8 +560,6 @@ final val com.apollographql.cache.normalized/isFromCache // com.apollographql.ca final fun (com.apollographql.apollo/ApolloClient.Builder).com.apollographql.cache.normalized/logCacheMisses(kotlin/Function1 = ...): com.apollographql.apollo/ApolloClient.Builder // com.apollographql.cache.normalized/logCacheMisses|logCacheMisses@com.apollographql.apollo.ApolloClient.Builder(kotlin.Function1){}[0] final fun (com.apollographql.apollo/ApolloClient.Builder).com.apollographql.cache.normalized/normalizedCache(com.apollographql.cache.normalized.api/NormalizedCacheFactory, com.apollographql.cache.normalized.api/CacheKeyGenerator = ..., com.apollographql.cache.normalized.api/MetadataGenerator = ..., 
com.apollographql.cache.normalized.api/CacheResolver = ..., com.apollographql.cache.normalized.api/RecordMerger = ..., com.apollographql.cache.normalized.api/FieldKeyGenerator = ..., com.apollographql.cache.normalized.api/EmbeddedFieldsProvider = ..., kotlin/Boolean = ...): com.apollographql.apollo/ApolloClient.Builder // com.apollographql.cache.normalized/normalizedCache|normalizedCache@com.apollographql.apollo.ApolloClient.Builder(com.apollographql.cache.normalized.api.NormalizedCacheFactory;com.apollographql.cache.normalized.api.CacheKeyGenerator;com.apollographql.cache.normalized.api.MetadataGenerator;com.apollographql.cache.normalized.api.CacheResolver;com.apollographql.cache.normalized.api.RecordMerger;com.apollographql.cache.normalized.api.FieldKeyGenerator;com.apollographql.cache.normalized.api.EmbeddedFieldsProvider;kotlin.Boolean){}[0] final fun (com.apollographql.apollo/ApolloClient.Builder).com.apollographql.cache.normalized/store(com.apollographql.cache.normalized/ApolloStore, kotlin/Boolean = ...): com.apollographql.apollo/ApolloClient.Builder // com.apollographql.cache.normalized/store|store@com.apollographql.apollo.ApolloClient.Builder(com.apollographql.cache.normalized.ApolloStore;kotlin.Boolean){}[0] -final fun (com.apollographql.cache.normalized.api/CacheKey).com.apollographql.cache.normalized.api/append(kotlin/Array...): com.apollographql.cache.normalized.api/CacheKey // com.apollographql.cache.normalized.api/append|append@com.apollographql.cache.normalized.api.CacheKey(kotlin.Array...){}[0] -final fun (com.apollographql.cache.normalized.api/CacheKey).com.apollographql.cache.normalized.api/fieldKey(kotlin/String): kotlin/String // com.apollographql.cache.normalized.api/fieldKey|fieldKey@com.apollographql.cache.normalized.api.CacheKey(kotlin.String){}[0] final fun (com.apollographql.cache.normalized.api/CacheKey).com.apollographql.cache.normalized.api/isRootKey(): kotlin/Boolean // com.apollographql.cache.normalized.api/isRootKey|isRootKey@com.apollographql.cache.normalized.api.CacheKey(){}[0] final fun (com.apollographql.cache.normalized.api/NormalizedCache).com.apollographql.cache.normalized/allRecords(): kotlin.collections/Map // com.apollographql.cache.normalized/allRecords|allRecords@com.apollographql.cache.normalized.api.NormalizedCache(){}[0] final fun (com.apollographql.cache.normalized.api/NormalizedCache).com.apollographql.cache.normalized/garbageCollect(com.apollographql.cache.normalized.api/MaxAgeProvider, kotlin.time/Duration = ...): com.apollographql.cache.normalized/GarbageCollectResult // com.apollographql.cache.normalized/garbageCollect|garbageCollect@com.apollographql.cache.normalized.api.NormalizedCache(com.apollographql.cache.normalized.api.MaxAgeProvider;kotlin.time.Duration){}[0] diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt index 0243680f..8648967f 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt @@ -1,6 +1,5 @@ package com.apollographql.cache.normalized.api -import com.apollographql.apollo.annotations.ApolloInternal import kotlin.jvm.JvmInline import kotlin.jvm.JvmStatic @@ -80,13 +79,11 @@ fun CacheKey.isRootKey(): Boolean { return this == CacheKey.rootKey() } -@ApolloInternal -fun CacheKey.fieldKey(fieldName: 
String): String { +internal fun CacheKey.fieldKey(fieldName: String): String { return "${keyToString()}.$fieldName" } -@ApolloInternal -fun CacheKey.append(vararg keys: String): CacheKey { +internal fun CacheKey.append(vararg keys: String): CacheKey { var cacheKey: CacheKey = this for (key in keys) { cacheKey = CacheKey("${cacheKey.key}.$key") diff --git a/normalized-cache-sqlite-incubating/build.gradle.kts b/normalized-cache-sqlite-incubating/build.gradle.kts index 5e910e35..66cfcca4 100644 --- a/normalized-cache-sqlite-incubating/build.gradle.kts +++ b/normalized-cache-sqlite-incubating/build.gradle.kts @@ -87,6 +87,7 @@ kotlin { dependencies { implementation(libs.kotlin.test) implementation(libs.apollo.testing.support) + implementation(project(":test-utils")) } } } diff --git a/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCacheTest.kt b/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCacheTest.kt index c690fc14..b8835714 100644 --- a/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCacheTest.kt +++ b/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCacheTest.kt @@ -13,8 +13,8 @@ import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.DefaultRecordMerger import com.apollographql.cache.normalized.api.NormalizedCache import com.apollographql.cache.normalized.api.Record -import com.apollographql.cache.normalized.api.fieldKey import com.apollographql.cache.normalized.sql.internal.RecordDatabase +import com.apollographql.cache.normalized.testing.fieldKey import kotlin.test.BeforeTest import kotlin.test.Test import kotlin.test.assertEquals diff --git a/settings.gradle.kts b/settings.gradle.kts index b85c4ce8..e6f2a61e 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -8,4 +8,5 @@ include( "normalized-cache-incubating", "normalized-cache-sqlite-incubating", "normalized-cache-apollo-compiler-plugin", + "test-utils", ) diff --git a/test-utils/README.md b/test-utils/README.md new file mode 100644 index 00000000..bcf6f6c2 --- /dev/null +++ b/test-utils/README.md @@ -0,0 +1,3 @@ +# Module test-utils + +A few testing utilities (not published). 
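Since fieldKey() and append() are no longer part of the published API, tests depend on the new test-utils module and import them from the testing package. A small self-contained sketch of that usage (the test class name and the cache keys are illustrative, not from this patch):

    import com.apollographql.cache.normalized.api.CacheKey
    import com.apollographql.cache.normalized.testing.append
    import com.apollographql.cache.normalized.testing.fieldKey
    import kotlin.test.Test
    import kotlin.test.assertEquals

    class CacheKeyTestingExtensionsTest {
      @Test
      fun fieldKeyAndAppend() {
        val user = CacheKey("User:42")
        // fieldKey() joins the record key and a field name with a '.'
        assertEquals(user.key + ".name", user.fieldKey("name"))
        // append() nests a sub-path under the parent key
        assertEquals(CacheKey(user.key + ".friends"), user.append("friends"))
      }
    }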
diff --git a/test-utils/api/test-utils.api b/test-utils/api/test-utils.api new file mode 100644 index 00000000..aa8aed1d --- /dev/null +++ b/test-utils/api/test-utils.api @@ -0,0 +1,5 @@ +public final class com/apollographql/cache/normalized/testing/FieldKeyKt { + public static final fun append-eNSUWrY (Ljava/lang/String;[Ljava/lang/String;)Ljava/lang/String; + public static final fun fieldKey-eNSUWrY (Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String; +} + diff --git a/test-utils/api/test-utils.klib.api b/test-utils/api/test-utils.klib.api new file mode 100644 index 00000000..adcf651c --- /dev/null +++ b/test-utils/api/test-utils.klib.api @@ -0,0 +1,10 @@ +// Klib ABI Dump +// Targets: [iosArm64, iosSimulatorArm64, iosX64, js, macosArm64, macosX64, tvosArm64, tvosSimulatorArm64, tvosX64, wasmJs, watchosArm32, watchosArm64, watchosSimulatorArm64] +// Rendering settings: +// - Signature version: 2 +// - Show manifest properties: true +// - Show declarations: true + +// Library unique name: +final fun (com.apollographql.cache.normalized.api/CacheKey).com.apollographql.cache.normalized.testing/append(kotlin/Array...): com.apollographql.cache.normalized.api/CacheKey // com.apollographql.cache.normalized.testing/append|append@com.apollographql.cache.normalized.api.CacheKey(kotlin.Array...){}[0] +final fun (com.apollographql.cache.normalized.api/CacheKey).com.apollographql.cache.normalized.testing/fieldKey(kotlin/String): kotlin/String // com.apollographql.cache.normalized.testing/fieldKey|fieldKey@com.apollographql.cache.normalized.api.CacheKey(kotlin.String){}[0] diff --git a/test-utils/build.gradle.kts b/test-utils/build.gradle.kts new file mode 100644 index 00000000..06538cc8 --- /dev/null +++ b/test-utils/build.gradle.kts @@ -0,0 +1,21 @@ +plugins { + id("org.jetbrains.kotlin.multiplatform") +} + +group = "com.apollographql.cache" + +kotlin { + configureKmp( + withJs = true, + withWasm = true, + withAndroid = false, + ) + + sourceSets { + getByName("commonMain") { + dependencies { + api(project(":normalized-cache-incubating")) + } + } + } +} diff --git a/test-utils/src/commonMain/kotlin/com/apollographql/cache/normalized/testing/fieldKey.kt b/test-utils/src/commonMain/kotlin/com/apollographql/cache/normalized/testing/fieldKey.kt new file mode 100644 index 00000000..b4b01b13 --- /dev/null +++ b/test-utils/src/commonMain/kotlin/com/apollographql/cache/normalized/testing/fieldKey.kt @@ -0,0 +1,15 @@ +package com.apollographql.cache.normalized.testing + +import com.apollographql.cache.normalized.api.CacheKey + +fun CacheKey.fieldKey(fieldName: String): String { + return "${keyToString()}.$fieldName" +} + +fun CacheKey.append(vararg keys: String): CacheKey { + var cacheKey: CacheKey = this + for (key in keys) { + cacheKey = CacheKey("${cacheKey.key}.$key") + } + return cacheKey +} diff --git a/tests/defer/build.gradle.kts b/tests/defer/build.gradle.kts index cc0c7030..abc60ab5 100644 --- a/tests/defer/build.gradle.kts +++ b/tests/defer/build.gradle.kts @@ -24,6 +24,7 @@ kotlin { implementation(libs.kotlin.test) implementation(libs.apollo.testing.support) implementation(libs.apollo.mockserver) + implementation("com.apollographql.cache:test-utils") } } } diff --git a/tests/defer/src/commonTest/kotlin/test/DeferNormalizedCacheTest.kt b/tests/defer/src/commonTest/kotlin/test/DeferNormalizedCacheTest.kt index f0a3b353..33e44e46 100644 --- a/tests/defer/src/commonTest/kotlin/test/DeferNormalizedCacheTest.kt +++ b/tests/defer/src/commonTest/kotlin/test/DeferNormalizedCacheTest.kt @@ -16,12 +16,12 
@@ import com.apollographql.cache.normalized.ApolloStore import com.apollographql.cache.normalized.FetchPolicy import com.apollographql.cache.normalized.api.CacheHeaders import com.apollographql.cache.normalized.api.CacheKey -import com.apollographql.cache.normalized.api.append import com.apollographql.cache.normalized.apolloStore import com.apollographql.cache.normalized.fetchPolicy import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.optimisticUpdates import com.apollographql.cache.normalized.store +import com.apollographql.cache.normalized.testing.append import com.apollographql.mockserver.MockServer import com.apollographql.mockserver.assertNoRequest import com.apollographql.mockserver.awaitRequest diff --git a/tests/garbage-collection/build.gradle.kts b/tests/garbage-collection/build.gradle.kts index 4e7d06ac..a70bbd27 100644 --- a/tests/garbage-collection/build.gradle.kts +++ b/tests/garbage-collection/build.gradle.kts @@ -24,6 +24,7 @@ kotlin { implementation(libs.apollo.testing.support) implementation(libs.apollo.mockserver) implementation(libs.kotlin.test) + implementation("com.apollographql.cache:test-utils") } } } diff --git a/tests/garbage-collection/src/commonTest/kotlin/DanglingReferencesTest.kt b/tests/garbage-collection/src/commonTest/kotlin/DanglingReferencesTest.kt index 32aa0c78..1ec8329b 100644 --- a/tests/garbage-collection/src/commonTest/kotlin/DanglingReferencesTest.kt +++ b/tests/garbage-collection/src/commonTest/kotlin/DanglingReferencesTest.kt @@ -6,13 +6,13 @@ import com.apollographql.cache.normalized.ApolloStore import com.apollographql.cache.normalized.FetchPolicy import com.apollographql.cache.normalized.allRecords import com.apollographql.cache.normalized.api.CacheKey -import com.apollographql.cache.normalized.api.append -import com.apollographql.cache.normalized.api.fieldKey import com.apollographql.cache.normalized.fetchPolicy import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.removeDanglingReferences import com.apollographql.cache.normalized.sql.SqlNormalizedCacheFactory import com.apollographql.cache.normalized.store +import com.apollographql.cache.normalized.testing.append +import com.apollographql.cache.normalized.testing.fieldKey import com.apollographql.mockserver.MockServer import com.apollographql.mockserver.enqueueString import kotlinx.coroutines.test.TestResult diff --git a/tests/garbage-collection/src/commonTest/kotlin/GarbageCollectTest.kt b/tests/garbage-collection/src/commonTest/kotlin/GarbageCollectTest.kt index 0c1fe1ed..f7f10cc6 100644 --- a/tests/garbage-collection/src/commonTest/kotlin/GarbageCollectTest.kt +++ b/tests/garbage-collection/src/commonTest/kotlin/GarbageCollectTest.kt @@ -7,14 +7,14 @@ import com.apollographql.cache.normalized.FetchPolicy import com.apollographql.cache.normalized.allRecords import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.SchemaCoordinatesMaxAgeProvider -import com.apollographql.cache.normalized.api.append -import com.apollographql.cache.normalized.api.fieldKey import com.apollographql.cache.normalized.cacheHeaders import com.apollographql.cache.normalized.fetchPolicy import com.apollographql.cache.normalized.garbageCollect import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.sql.SqlNormalizedCacheFactory import com.apollographql.cache.normalized.store +import 
com.apollographql.cache.normalized.testing.append +import com.apollographql.cache.normalized.testing.fieldKey import com.apollographql.mockserver.MockServer import com.apollographql.mockserver.enqueueString import okio.use diff --git a/tests/garbage-collection/src/commonTest/kotlin/StaleFieldsTest.kt b/tests/garbage-collection/src/commonTest/kotlin/StaleFieldsTest.kt index 88649b73..7333848c 100644 --- a/tests/garbage-collection/src/commonTest/kotlin/StaleFieldsTest.kt +++ b/tests/garbage-collection/src/commonTest/kotlin/StaleFieldsTest.kt @@ -11,14 +11,14 @@ import com.apollographql.cache.normalized.api.CacheHeaders import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.GlobalMaxAgeProvider import com.apollographql.cache.normalized.api.SchemaCoordinatesMaxAgeProvider -import com.apollographql.cache.normalized.api.append -import com.apollographql.cache.normalized.api.fieldKey import com.apollographql.cache.normalized.cacheHeaders import com.apollographql.cache.normalized.fetchPolicy import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.removeStaleFields import com.apollographql.cache.normalized.sql.SqlNormalizedCacheFactory import com.apollographql.cache.normalized.store +import com.apollographql.cache.normalized.testing.append +import com.apollographql.cache.normalized.testing.fieldKey import com.apollographql.mockserver.MockServer import com.apollographql.mockserver.enqueueString import okio.use diff --git a/tests/normalized-cache/build.gradle.kts b/tests/normalized-cache/build.gradle.kts index 9f3bbebf..babe8e4c 100644 --- a/tests/normalized-cache/build.gradle.kts +++ b/tests/normalized-cache/build.gradle.kts @@ -31,6 +31,7 @@ kotlin { implementation(libs.apollo.mockserver) implementation(libs.kotlin.test) implementation(libs.turbine) + implementation("com.apollographql.cache:test-utils") } } diff --git a/tests/normalized-cache/src/commonTest/kotlin/FetchPolicyTest.kt b/tests/normalized-cache/src/commonTest/kotlin/FetchPolicyTest.kt index dda69db3..ca6cd9b3 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/FetchPolicyTest.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/FetchPolicyTest.kt @@ -26,12 +26,12 @@ import com.apollographql.cache.normalized.CacheFirstInterceptor import com.apollographql.cache.normalized.CacheOnlyInterceptor import com.apollographql.cache.normalized.FetchPolicy import com.apollographql.cache.normalized.api.CacheKey -import com.apollographql.cache.normalized.api.fieldKey import com.apollographql.cache.normalized.fetchPolicy import com.apollographql.cache.normalized.isFromCache import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.refetchPolicyInterceptor import com.apollographql.cache.normalized.store +import com.apollographql.cache.normalized.testing.fieldKey import com.apollographql.cache.normalized.watch import com.apollographql.mockserver.MockServer import com.apollographql.mockserver.awaitRequest diff --git a/tests/normalized-cache/src/commonTest/kotlin/NormalizerTest.kt b/tests/normalized-cache/src/commonTest/kotlin/NormalizerTest.kt index b46a23c6..2a08e52d 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/NormalizerTest.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/NormalizerTest.kt @@ -8,9 +8,9 @@ import com.apollographql.cache.normalized.api.DefaultRecordMerger import com.apollographql.cache.normalized.api.IdCacheKeyGenerator import 
com.apollographql.cache.normalized.api.NormalizedCache import com.apollographql.cache.normalized.api.Record -import com.apollographql.cache.normalized.api.append import com.apollographql.cache.normalized.internal.normalized import com.apollographql.cache.normalized.memory.MemoryCacheFactory +import com.apollographql.cache.normalized.testing.append import httpcache.AllPlanetsQuery import normalizer.EpisodeHeroNameQuery import normalizer.HeroAndFriendsNamesQuery diff --git a/tests/normalized-cache/src/commonTest/kotlin/fragmentnormalizer/FragmentNormalizerTest.kt b/tests/normalized-cache/src/commonTest/kotlin/fragmentnormalizer/FragmentNormalizerTest.kt index 31e02466..4805f18f 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/fragmentnormalizer/FragmentNormalizerTest.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/fragmentnormalizer/FragmentNormalizerTest.kt @@ -5,11 +5,11 @@ import com.apollographql.apollo.api.CustomScalarAdapters import com.apollographql.apollo.testing.internal.runTest import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.IdCacheKeyGenerator -import com.apollographql.cache.normalized.api.append import com.apollographql.cache.normalized.apolloStore import com.apollographql.cache.normalized.internal.normalized import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.normalizedCache +import com.apollographql.cache.normalized.testing.append import fragmentnormalizer.fragment.ConversationFragment import fragmentnormalizer.fragment.ConversationFragmentImpl import kotlin.test.Test diff --git a/tests/partial-results/build.gradle.kts b/tests/partial-results/build.gradle.kts index f0e14739..5941d46c 100644 --- a/tests/partial-results/build.gradle.kts +++ b/tests/partial-results/build.gradle.kts @@ -24,6 +24,7 @@ kotlin { implementation(libs.apollo.testing.support) implementation(libs.apollo.mockserver) implementation(libs.kotlin.test) + implementation("com.apollographql.cache:test-utils") } } diff --git a/tests/partial-results/src/commonTest/kotlin/test/CachePartialResultTest.kt b/tests/partial-results/src/commonTest/kotlin/test/CachePartialResultTest.kt index a0ac292e..b84989b2 100644 --- a/tests/partial-results/src/commonTest/kotlin/test/CachePartialResultTest.kt +++ b/tests/partial-results/src/commonTest/kotlin/test/CachePartialResultTest.kt @@ -19,7 +19,6 @@ import com.apollographql.cache.normalized.api.IdCacheKeyGenerator import com.apollographql.cache.normalized.api.IdCacheKeyResolver import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.api.SchemaCoordinatesMaxAgeProvider -import com.apollographql.cache.normalized.api.append import com.apollographql.cache.normalized.apolloStore import com.apollographql.cache.normalized.fetchFromCache import com.apollographql.cache.normalized.fetchPolicy @@ -28,6 +27,7 @@ import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.normalizedCache import com.apollographql.cache.normalized.store import com.apollographql.cache.normalized.storeReceivedDate +import com.apollographql.cache.normalized.testing.append import com.apollographql.mockserver.MockServer import com.apollographql.mockserver.enqueueString import kotlinx.coroutines.flow.Flow From db4d91d866e16d3a2b70efe47714971700d079be Mon Sep 17 00:00:00 2001 From: BoD Date: Wed, 26 Mar 2025 18:01:36 +0100 Subject: [PATCH 25/29] Make CacheKey.keyToString() internal --- 
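Test code that needs the raw key string now imports the extension from the testing package rather than calling the API-module function, which becomes internal in this patch. A hypothetical helper to illustrate the new import:

    import com.apollographql.cache.normalized.api.CacheKey
    import com.apollographql.cache.normalized.testing.keyToString

    // The testing variant simply returns the underlying key string, matching the
    // behavior of the now-internal CacheKey.keyToString() in the api package.
    fun describeRecord(cacheKey: CacheKey): String {
      return "record key: " + cacheKey.keyToString()
    }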
.../api/normalized-cache-incubating.api | 1 - .../api/normalized-cache-incubating.klib.api | 1 - .../kotlin/com/apollographql/cache/normalized/api/CacheKey.kt | 2 +- test-utils/api/test-utils.api | 3 ++- test-utils/api/test-utils.klib.api | 1 + .../cache/normalized/testing/{fieldKey.kt => CacheKey.kt} | 4 ++++ .../src/commonTest/kotlin/test/DeferNormalizedCacheTest.kt | 1 + .../normalized-cache/src/commonTest/kotlin/OtherCacheTest.kt | 1 + .../src/jvmTest/kotlin/CacheMissLoggingInterceptorTest.kt | 1 + .../src/commonTest/kotlin/test/CachePartialResultTest.kt | 1 + 10 files changed, 12 insertions(+), 4 deletions(-) rename test-utils/src/commonMain/kotlin/com/apollographql/cache/normalized/testing/{fieldKey.kt => CacheKey.kt} (87%) diff --git a/normalized-cache-incubating/api/normalized-cache-incubating.api b/normalized-cache-incubating/api/normalized-cache-incubating.api index 5854cfbd..1e410c46 100644 --- a/normalized-cache-incubating/api/normalized-cache-incubating.api +++ b/normalized-cache-incubating/api/normalized-cache-incubating.api @@ -241,7 +241,6 @@ public final class com/apollographql/cache/normalized/api/CacheKey { public final fun getKey ()Ljava/lang/String; public fun hashCode ()I public static fun hashCode-impl (Ljava/lang/String;)I - public static final fun keyToString-impl (Ljava/lang/String;)Ljava/lang/String; public static final fun rootKey-mqw0cJ0 ()Ljava/lang/String; public static final fun serialize-impl (Ljava/lang/String;)Ljava/lang/String; public fun toString ()Ljava/lang/String; diff --git a/normalized-cache-incubating/api/normalized-cache-incubating.klib.api b/normalized-cache-incubating/api/normalized-cache-incubating.klib.api index b9cfca4f..4ca53d65 100644 --- a/normalized-cache-incubating/api/normalized-cache-incubating.klib.api +++ b/normalized-cache-incubating/api/normalized-cache-incubating.klib.api @@ -471,7 +471,6 @@ final value class com.apollographql.cache.normalized.api/CacheKey { // com.apoll final fun equals(kotlin/Any?): kotlin/Boolean // com.apollographql.cache.normalized.api/CacheKey.equals|equals(kotlin.Any?){}[0] final fun hashCode(): kotlin/Int // com.apollographql.cache.normalized.api/CacheKey.hashCode|hashCode(){}[0] - final fun keyToString(): kotlin/String // com.apollographql.cache.normalized.api/CacheKey.keyToString|keyToString(){}[0] final fun serialize(): kotlin/String // com.apollographql.cache.normalized.api/CacheKey.serialize|serialize(){}[0] final fun toString(): kotlin/String // com.apollographql.cache.normalized.api/CacheKey.toString|toString(){}[0] diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt index 8648967f..68fb9b01 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt @@ -35,7 +35,7 @@ value class CacheKey( */ constructor(typename: String, vararg values: String) : this(typename, values.toList()) - fun keyToString(): String { + internal fun keyToString(): String { return key } diff --git a/test-utils/api/test-utils.api b/test-utils/api/test-utils.api index aa8aed1d..deee5231 100644 --- a/test-utils/api/test-utils.api +++ b/test-utils/api/test-utils.api @@ -1,5 +1,6 @@ -public final class com/apollographql/cache/normalized/testing/FieldKeyKt { +public final class 
com/apollographql/cache/normalized/testing/CacheKeyKt { public static final fun append-eNSUWrY (Ljava/lang/String;[Ljava/lang/String;)Ljava/lang/String; public static final fun fieldKey-eNSUWrY (Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String; + public static final fun keyToString-pWl1Des (Ljava/lang/String;)Ljava/lang/String; } diff --git a/test-utils/api/test-utils.klib.api b/test-utils/api/test-utils.klib.api index adcf651c..a99a94d6 100644 --- a/test-utils/api/test-utils.klib.api +++ b/test-utils/api/test-utils.klib.api @@ -8,3 +8,4 @@ // Library unique name: final fun (com.apollographql.cache.normalized.api/CacheKey).com.apollographql.cache.normalized.testing/append(kotlin/Array...): com.apollographql.cache.normalized.api/CacheKey // com.apollographql.cache.normalized.testing/append|append@com.apollographql.cache.normalized.api.CacheKey(kotlin.Array...){}[0] final fun (com.apollographql.cache.normalized.api/CacheKey).com.apollographql.cache.normalized.testing/fieldKey(kotlin/String): kotlin/String // com.apollographql.cache.normalized.testing/fieldKey|fieldKey@com.apollographql.cache.normalized.api.CacheKey(kotlin.String){}[0] +final fun (com.apollographql.cache.normalized.api/CacheKey).com.apollographql.cache.normalized.testing/keyToString(): kotlin/String // com.apollographql.cache.normalized.testing/keyToString|keyToString@com.apollographql.cache.normalized.api.CacheKey(){}[0] diff --git a/test-utils/src/commonMain/kotlin/com/apollographql/cache/normalized/testing/fieldKey.kt b/test-utils/src/commonMain/kotlin/com/apollographql/cache/normalized/testing/CacheKey.kt similarity index 87% rename from test-utils/src/commonMain/kotlin/com/apollographql/cache/normalized/testing/fieldKey.kt rename to test-utils/src/commonMain/kotlin/com/apollographql/cache/normalized/testing/CacheKey.kt index b4b01b13..51f35a94 100644 --- a/test-utils/src/commonMain/kotlin/com/apollographql/cache/normalized/testing/fieldKey.kt +++ b/test-utils/src/commonMain/kotlin/com/apollographql/cache/normalized/testing/CacheKey.kt @@ -13,3 +13,7 @@ fun CacheKey.append(vararg keys: String): CacheKey { } return cacheKey } + +fun CacheKey.keyToString(): String { + return key +} diff --git a/tests/defer/src/commonTest/kotlin/test/DeferNormalizedCacheTest.kt b/tests/defer/src/commonTest/kotlin/test/DeferNormalizedCacheTest.kt index 33e44e46..a5bef974 100644 --- a/tests/defer/src/commonTest/kotlin/test/DeferNormalizedCacheTest.kt +++ b/tests/defer/src/commonTest/kotlin/test/DeferNormalizedCacheTest.kt @@ -22,6 +22,7 @@ import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.optimisticUpdates import com.apollographql.cache.normalized.store import com.apollographql.cache.normalized.testing.append +import com.apollographql.cache.normalized.testing.keyToString import com.apollographql.mockserver.MockServer import com.apollographql.mockserver.assertNoRequest import com.apollographql.mockserver.awaitRequest diff --git a/tests/normalized-cache/src/commonTest/kotlin/OtherCacheTest.kt b/tests/normalized-cache/src/commonTest/kotlin/OtherCacheTest.kt index d18ddb6c..cad1d811 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/OtherCacheTest.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/OtherCacheTest.kt @@ -12,6 +12,7 @@ import com.apollographql.cache.normalized.api.IdCacheKeyResolver import com.apollographql.cache.normalized.fetchPolicy import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.store +import 
com.apollographql.cache.normalized.testing.keyToString import com.apollographql.mockserver.MockServer import com.apollographql.mockserver.enqueueString import normalizer.CharacterDetailsQuery diff --git a/tests/normalized-cache/src/jvmTest/kotlin/CacheMissLoggingInterceptorTest.kt b/tests/normalized-cache/src/jvmTest/kotlin/CacheMissLoggingInterceptorTest.kt index 3970edd5..f80e4831 100644 --- a/tests/normalized-cache/src/jvmTest/kotlin/CacheMissLoggingInterceptorTest.kt +++ b/tests/normalized-cache/src/jvmTest/kotlin/CacheMissLoggingInterceptorTest.kt @@ -8,6 +8,7 @@ import com.apollographql.cache.normalized.fetchPolicy import com.apollographql.cache.normalized.logCacheMisses import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.normalizedCache +import com.apollographql.cache.normalized.testing.keyToString import com.apollographql.mockserver.MockServer import com.apollographql.mockserver.enqueueString import normalizer.HeroAppearsInQuery diff --git a/tests/partial-results/src/commonTest/kotlin/test/CachePartialResultTest.kt b/tests/partial-results/src/commonTest/kotlin/test/CachePartialResultTest.kt index b84989b2..0db6be13 100644 --- a/tests/partial-results/src/commonTest/kotlin/test/CachePartialResultTest.kt +++ b/tests/partial-results/src/commonTest/kotlin/test/CachePartialResultTest.kt @@ -28,6 +28,7 @@ import com.apollographql.cache.normalized.normalizedCache import com.apollographql.cache.normalized.store import com.apollographql.cache.normalized.storeReceivedDate import com.apollographql.cache.normalized.testing.append +import com.apollographql.cache.normalized.testing.keyToString import com.apollographql.mockserver.MockServer import com.apollographql.mockserver.enqueueString import kotlinx.coroutines.flow.Flow From 3e7ecde2ee9159d1e207dfc840e016c910f283b4 Mon Sep 17 00:00:00 2001 From: BoD Date: Wed, 26 Mar 2025 18:35:04 +0100 Subject: [PATCH 26/29] Add a comment about using string lengths --- .../cache/normalized/internal/RecordWeigher.kt | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/RecordWeigher.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/RecordWeigher.kt index b4f367b9..8129d27a 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/RecordWeigher.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/RecordWeigher.kt @@ -5,7 +5,6 @@ import com.apollographql.apollo.api.json.JsonNumber import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.api.RecordValue -import okio.internal.commonAsUtf8ToByteArray import kotlin.jvm.JvmStatic internal object RecordWeigher { @@ -36,14 +35,18 @@ internal object RecordWeigher { } private fun weighField(field: RecordValue): Int { + /* + * Note: for Strings we use the character length where we should use the UTF-8 size, + * but this is a good enough estimation for the weight, and avoids a bit of overhead. 
+ */ return when (field) { null -> SIZE_OF_NULL - is String -> field.commonAsUtf8ToByteArray().size + is String -> field.length is Boolean -> SIZE_OF_BOOLEAN is Int -> SIZE_OF_INT is Long -> SIZE_OF_LONG // Might happen with LongDataAdapter is Double -> SIZE_OF_DOUBLE - is JsonNumber -> field.value.commonAsUtf8ToByteArray().size + SIZE_OF_LONG + is JsonNumber -> field.value.length + SIZE_OF_LONG /** * Custom scalars with a json object representation are stored directly in the record */ @@ -61,7 +64,7 @@ internal object RecordWeigher { is Error -> { SIZE_OF_ERROR_OVERHEAD + - field.message.commonAsUtf8ToByteArray().size + + field.message.length + (field.locations?.size ?: 0) * SIZE_OF_INT * 2 + weighField(field.path) + weighField(field.extensions) + From 59f1296c4ea0b4994fa07b8c28f72d24660a015f Mon Sep 17 00:00:00 2001 From: BoD Date: Thu, 27 Mar 2025 14:58:49 +0100 Subject: [PATCH 27/29] Remove CacheKey.serialize() and co. --- .../api/normalized-cache-incubating.api | 5 ---- .../api/normalized-cache-incubating.klib.api | 3 --- .../cache/normalized/ApolloStore.kt | 4 ++-- .../cache/normalized/api/CacheKey.kt | 24 ------------------- 4 files changed, 2 insertions(+), 34 deletions(-) diff --git a/normalized-cache-incubating/api/normalized-cache-incubating.api b/normalized-cache-incubating/api/normalized-cache-incubating.api index 1e410c46..12312a9a 100644 --- a/normalized-cache-incubating/api/normalized-cache-incubating.api +++ b/normalized-cache-incubating/api/normalized-cache-incubating.api @@ -230,11 +230,9 @@ public final class com/apollographql/cache/normalized/api/CacheHeaders$Companion public final class com/apollographql/cache/normalized/api/CacheKey { public static final field Companion Lcom/apollographql/cache/normalized/api/CacheKey$Companion; public static final synthetic fun box-impl (Ljava/lang/String;)Lcom/apollographql/cache/normalized/api/CacheKey; - public static final fun canDeserialize (Ljava/lang/String;)Z public static fun constructor-impl (Ljava/lang/String;)Ljava/lang/String; public static fun constructor-impl (Ljava/lang/String;Ljava/util/List;)Ljava/lang/String; public static fun constructor-impl (Ljava/lang/String;[Ljava/lang/String;)Ljava/lang/String; - public static final fun deserialize-gE2UBb4 (Ljava/lang/String;)Ljava/lang/String; public fun equals (Ljava/lang/Object;)Z public static fun equals-impl (Ljava/lang/String;Ljava/lang/Object;)Z public static final fun equals-impl0 (Ljava/lang/String;Ljava/lang/String;)Z @@ -242,15 +240,12 @@ public final class com/apollographql/cache/normalized/api/CacheKey { public fun hashCode ()I public static fun hashCode-impl (Ljava/lang/String;)I public static final fun rootKey-mqw0cJ0 ()Ljava/lang/String; - public static final fun serialize-impl (Ljava/lang/String;)Ljava/lang/String; public fun toString ()Ljava/lang/String; public static fun toString-impl (Ljava/lang/String;)Ljava/lang/String; public final synthetic fun unbox-impl ()Ljava/lang/String; } public final class com/apollographql/cache/normalized/api/CacheKey$Companion { - public final fun canDeserialize (Ljava/lang/String;)Z - public final fun deserialize-gE2UBb4 (Ljava/lang/String;)Ljava/lang/String; public final fun rootKey-mqw0cJ0 ()Ljava/lang/String; } diff --git a/normalized-cache-incubating/api/normalized-cache-incubating.klib.api b/normalized-cache-incubating/api/normalized-cache-incubating.klib.api index 4ca53d65..f4822de5 100644 --- a/normalized-cache-incubating/api/normalized-cache-incubating.klib.api +++ 
b/normalized-cache-incubating/api/normalized-cache-incubating.klib.api @@ -471,12 +471,9 @@ final value class com.apollographql.cache.normalized.api/CacheKey { // com.apoll final fun equals(kotlin/Any?): kotlin/Boolean // com.apollographql.cache.normalized.api/CacheKey.equals|equals(kotlin.Any?){}[0] final fun hashCode(): kotlin/Int // com.apollographql.cache.normalized.api/CacheKey.hashCode|hashCode(){}[0] - final fun serialize(): kotlin/String // com.apollographql.cache.normalized.api/CacheKey.serialize|serialize(){}[0] final fun toString(): kotlin/String // com.apollographql.cache.normalized.api/CacheKey.toString|toString(){}[0] final object Companion { // com.apollographql.cache.normalized.api/CacheKey.Companion|null[0] - final fun canDeserialize(kotlin/String): kotlin/Boolean // com.apollographql.cache.normalized.api/CacheKey.Companion.canDeserialize|canDeserialize(kotlin.String){}[0] - final fun deserialize(kotlin/String): com.apollographql.cache.normalized.api/CacheKey // com.apollographql.cache.normalized.api/CacheKey.Companion.deserialize|deserialize(kotlin.String){}[0] final fun rootKey(): com.apollographql.cache.normalized.api/CacheKey // com.apollographql.cache.normalized.api/CacheKey.Companion.rootKey|rootKey(){}[0] } } diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/ApolloStore.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/ApolloStore.kt index 72d11670..666f7ed2 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/ApolloStore.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/ApolloStore.kt @@ -345,8 +345,8 @@ private fun RecordValue.toExternal(): Any? { is Long -> this is Double -> this is JsonNumber -> this - is CacheKey -> this.serialize() - is Error -> this + is CacheKey -> "ApolloCacheReference{${this.keyToString()}}" + is Error -> "ApolloCacheError{${this.message}}" is List<*> -> { map { it.toExternal() } } diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt index 68fb9b01..37aeede6 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt @@ -41,31 +41,7 @@ value class CacheKey( override fun toString() = "CacheKey(${keyToString()})" - fun serialize(): String { - return "$SERIALIZATION_TEMPLATE{${keyToString()}}" - } - companion object { - // IntelliJ complains about the invalid escape but looks like JS still needs it. 
- // See https://youtrack.jetbrains.com/issue/KT-47189 - @Suppress("RegExpRedundantEscape") - private val SERIALIZATION_REGEX_PATTERN = Regex("ApolloCacheReference\\{(.*)\\}") - private const val SERIALIZATION_TEMPLATE = "ApolloCacheReference" - - @JvmStatic - fun deserialize(serializedCacheKey: String): CacheKey { - val values = SERIALIZATION_REGEX_PATTERN.matchEntire(serializedCacheKey)?.groupValues - require(values != null && values.size > 1) { - "Not a cache reference: $serializedCacheKey Must be of the form: $SERIALIZATION_TEMPLATE{%s}" - } - return CacheKey(values[1]) - } - - @JvmStatic - fun canDeserialize(value: String): Boolean { - return SERIALIZATION_REGEX_PATTERN.matches(value) - } - private val ROOT_CACHE_KEY = CacheKey("QUERY_ROOT") @JvmStatic From 98402186379a70818efc364f1c0cd839245bd693 Mon Sep 17 00:00:00 2001 From: BoD Date: Thu, 27 Mar 2025 16:21:34 +0100 Subject: [PATCH 28/29] Optim: track errors during cache reads to avoid an extra traversal of the data --- .../cache/normalized/api/DataWithErrors.kt | 15 ------- .../normalized/internal/CacheBatchReader.kt | 40 +++++++++++++++++-- .../normalized/internal/DefaultApolloStore.kt | 3 +- 3 files changed, 38 insertions(+), 20 deletions(-) diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/DataWithErrors.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/DataWithErrors.kt index 6881caca..0bc9a7a9 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/DataWithErrors.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/DataWithErrors.kt @@ -186,18 +186,3 @@ private fun CompiledSelection.fieldSelection(responseName: String): CompiledFiel .build() } } - -@Suppress("UNCHECKED_CAST") -internal fun Any?.hasErrors(): Boolean { - val queue = ArrayDeque() - queue.add(this) - while (queue.isNotEmpty()) { - val current = queue.removeFirst() - when (current) { - is Error -> return true - is Map<*, *> -> queue.addAll(current.values) - is List<*> -> queue.addAll(current) - } - } - return false -} diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/CacheBatchReader.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/CacheBatchReader.kt index 1fa57607..b5a3f2b7 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/CacheBatchReader.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/CacheBatchReader.kt @@ -57,6 +57,11 @@ internal class CacheBatchReader( */ private var isStale = false + /** + * True if at least one of the resolved fields is an Error, or if a cache miss happened + */ + private var hasErrors = false + private val pendingReferences = mutableListOf() private class CollectState(val variables: Executable.Variables) { @@ -104,8 +109,13 @@ internal class CacheBatchReader( ) while (pendingReferences.isNotEmpty()) { - val records = cache.loadRecords(pendingReferences.map { it.key }, cacheHeaders).associateBy { it.key } - + val records: Map = cache.loadRecords(pendingReferences.map { it.key }, cacheHeaders) + .also { + if (!hasErrors) { + hasErrors = it.any { it.values.any { it.hasErrors() } } + } + } + .associateBy { it.key } val copy = pendingReferences.toList() pendingReferences.clear() copy.forEach { pendingReference -> @@ -118,6 +128,7 @@ internal class
CacheBatchReader( if (returnPartialResponses) { data[pendingReference.path] = cacheMissError(CacheMissException(key = pendingReference.key.keyToString(), fieldName = null, stale = false), path = pendingReference.path) + hasErrors = true return@forEach } else { throw CacheMissException(pendingReference.key.keyToString()) @@ -149,6 +160,7 @@ internal class CacheBatchReader( } catch (e: CacheMissException) { if (e.stale) isStale = true if (returnPartialResponses) { + hasErrors = true cacheMissError(e, pendingReference.path + it.responseName) } else { throw e @@ -163,7 +175,11 @@ internal class CacheBatchReader( } } - return CacheBatchReaderData(data, CacheHeaders.Builder().apply { if (isStale) addHeader(ApolloCacheHeaders.STALE, "true") }.build()) + return CacheBatchReaderData( + data = data, + cacheHeaders = CacheHeaders.Builder().apply { if (isStale) addHeader(ApolloCacheHeaders.STALE, "true") }.build(), + hasErrors = hasErrors, + ) } private fun Any?.unwrap(): Any? { @@ -234,6 +250,7 @@ internal class CacheBatchReader( } catch (e: CacheMissException) { if (e.stale) isStale = true if (returnPartialResponses) { + hasErrors = true cacheMissError(e, path + it.responseName) } else { throw e @@ -248,6 +265,7 @@ internal class CacheBatchReader( internal class CacheBatchReaderData( private val data: Map, Any>, val cacheHeaders: CacheHeaders, + val hasErrors: Boolean, ) { @Suppress("UNCHECKED_CAST") internal fun toMap(withErrors: Boolean = true): DataWithErrors { @@ -304,4 +322,20 @@ internal class CacheBatchReader( .cacheMissException(exception) .build() } + + @Suppress("UNCHECKED_CAST") + internal fun Any?.hasErrors(): Boolean { + val queue = ArrayDeque() + queue.add(this) + while (queue.isNotEmpty()) { + val current = queue.removeFirst() + when (current) { + is Error -> return true + is List<*> -> queue.addAll(current) + // Embedded fields can be represented as Maps + is Map<*, *> -> queue.addAll(current.values) + } + } + return false + } } diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/DefaultApolloStore.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/DefaultApolloStore.kt index bdc5c062..8c73a790 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/DefaultApolloStore.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/DefaultApolloStore.kt @@ -25,7 +25,6 @@ import com.apollographql.cache.normalized.api.NormalizedCache import com.apollographql.cache.normalized.api.NormalizedCacheFactory import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.api.RecordMerger -import com.apollographql.cache.normalized.api.hasErrors import com.apollographql.cache.normalized.api.propagateErrors import com.apollographql.cache.normalized.api.withErrors import com.apollographql.cache.normalized.cacheHeaders @@ -141,7 +140,7 @@ internal class DefaultApolloStore( @Suppress("UNCHECKED_CAST") val dataWithNulls: Map? 
= - if (dataWithErrors.hasErrors()) { + if (batchReaderData.hasErrors) { propagateErrors(dataWithErrors, operation.rootField(), errors) } else { dataWithErrors From 0acb6379f8a7e5ff28abfa96ad137b6455182fdf Mon Sep 17 00:00:00 2001 From: BoD Date: Thu, 27 Mar 2025 19:54:19 +0100 Subject: [PATCH 29/29] RecordSerializer: encode ints smaller than 255-32 as one byte --- .../sql/internal/RecordSerializer.kt | 179 +++++++++--------- 1 file changed, 87 insertions(+), 92 deletions(-) diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordSerializer.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordSerializer.kt index 6a80943c..79a22f6d 100644 --- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordSerializer.kt +++ b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordSerializer.kt @@ -56,8 +56,8 @@ internal object RecordSerializer { private fun Buffer._writeInt(value: Int) { when (value) { - 0 -> { - writeByte(INT_0) + in 0.. { + writeByte(value) } in Byte.MIN_VALUE..Byte.MAX_VALUE -> { @@ -78,11 +78,12 @@ internal object RecordSerializer { } private fun Buffer._readInt(): Int { - return when (val what = readByte().toInt()) { - INT_0 -> 0 - INT_BYTE -> readByte().toInt() - INT_SHORT -> readShort().toInt() - INT_INT -> readInt() + val what = readByte().toInt() and 0xFF + return when { + what < FIRST -> what + what == INT_BYTE -> readByte().toInt() + what == INT_SHORT -> readShort().toInt() + what == INT_INT -> readInt() else -> error("Trying to read unsupported Int type: $what") } } @@ -115,17 +116,6 @@ internal object RecordSerializer { } } - private fun Buffer._readLong(): Long { - return when (val what = readByte().toInt()) { - LONG_0 -> 0L - LONG_BYTE -> readByte().toLong() - LONG_SHORT -> readShort().toLong() - LONG_INT -> readInt().toLong() - LONG_LONG -> readLong() - else -> error("Trying to read unsupported Long type: $what") - } - } - private fun Buffer.writeMap(value: Map<*, *>) { _writeInt(value.size) @Suppress("UNCHECKED_CAST") @@ -232,93 +222,98 @@ internal object RecordSerializer { } private fun Buffer.readAny(): RecordValue { - return when (val what = readByte().toInt()) { - STRING -> readString() - STRING_EMPTY -> "" - INT_0 -> 0 - INT_BYTE -> readByte().toInt() - INT_SHORT -> readShort().toInt() - INT_INT -> readInt() - LONG_0 -> 0L - LONG_BYTE -> readByte().toLong() - LONG_SHORT -> readShort().toLong() - LONG_INT -> readInt().toLong() - LONG_LONG -> readLong() - DOUBLE -> Double.fromBits(readLong()) - JSON_NUMBER -> JsonNumber(readString()) - BOOLEAN_TRUE -> true - BOOLEAN_FALSE -> false - CACHE_KEY -> { - CacheKey(readString()) - } + val what = readByte().toInt() and 0xFF + return if (what < FIRST) { + what + } else { + when (what) { + STRING -> readString() + STRING_EMPTY -> "" + INT_BYTE -> readByte().toInt() + INT_SHORT -> readShort().toInt() + INT_INT -> readInt() + LONG_0 -> 0L + LONG_BYTE -> readByte().toLong() + LONG_SHORT -> readShort().toLong() + LONG_INT -> readInt().toLong() + LONG_LONG -> readLong() + DOUBLE -> Double.fromBits(readLong()) + JSON_NUMBER -> JsonNumber(readString()) + BOOLEAN_TRUE -> true + BOOLEAN_FALSE -> false + CACHE_KEY -> { + CacheKey(readString()) + } - LIST -> { - val size = _readInt() - 0.until(size).map { - readAny() + LIST -> { + val size = _readInt() + 0.until(size).map { + readAny() + } } - 
} - LIST_EMPTY -> emptyList() + LIST_EMPTY -> emptyList() - MAP -> { - readMap() - } + MAP -> { + readMap() + } - MAP_EMPTY -> emptyMap() + MAP_EMPTY -> emptyMap() - NULL -> null + NULL -> null - ERROR -> { - val message = readString() - val locations = 0.until(_readInt()).map { - Error.Location(_readInt(), _readInt()) - } - val path = 0.until(_readInt()).map { - readAny()!! - } + ERROR -> { + val message = readString() + val locations = 0.until(_readInt()).map { + Error.Location(_readInt(), _readInt()) + } + val path = 0.until(_readInt()).map { + readAny()!! + } - @Suppress("UNCHECKED_CAST") - val extensions = readAny() as Map? - Builder(message = message) - .path(path) - .apply { - for ((key, value) in extensions.orEmpty()) { - putExtension(key, value) + @Suppress("UNCHECKED_CAST") + val extensions = readAny() as Map? + Builder(message = message) + .path(path) + .apply { + for ((key, value) in extensions.orEmpty()) { + putExtension(key, value) + } + if (locations.isNotEmpty()) { + locations(locations) + } } - if (locations.isNotEmpty()) { - locations(locations) - } - } - .build() - } + .build() + } - else -> error("Trying to read unsupported Record type: $what") + else -> error("Trying to read unsupported Record type: $what") + } } } - private const val NULL = 0 - private const val STRING = 1 - private const val STRING_EMPTY = 2 - private const val INT_0 = 3 - private const val INT_BYTE = 4 - private const val INT_SHORT = 5 - private const val INT_INT = 6 - private const val LONG_0 = 7 - private const val LONG_BYTE = 8 - private const val LONG_SHORT = 9 - private const val LONG_INT = 10 - private const val LONG_LONG = 11 - private const val BOOLEAN_TRUE = 12 - private const val BOOLEAN_FALSE = 13 - private const val DOUBLE = 14 - private const val JSON_NUMBER = 15 - private const val LIST = 16 - private const val LIST_EMPTY = 17 - private const val MAP = 18 - private const val MAP_EMPTY = 19 - private const val CACHE_KEY = 20 - private const val ERROR = 21 + private const val FIRST = 255 - 32 + + private const val NULL = FIRST + private const val STRING = FIRST + 1 + private const val STRING_EMPTY = FIRST + 2 + private const val INT_BYTE = FIRST + 3 + private const val INT_SHORT = FIRST + 4 + private const val INT_INT = FIRST + 5 + private const val LONG_0 = FIRST + 6 + private const val LONG_BYTE = FIRST + 7 + private const val LONG_SHORT = FIRST + 8 + private const val LONG_INT = FIRST + 9 + private const val LONG_LONG = FIRST + 10 + private const val BOOLEAN_TRUE = FIRST + 11 + private const val BOOLEAN_FALSE = FIRST + 12 + private const val DOUBLE = FIRST + 13 + private const val JSON_NUMBER = FIRST + 14 + private const val LIST = FIRST + 15 + private const val LIST_EMPTY = FIRST + 16 + private const val MAP = FIRST + 17 + private const val MAP_EMPTY = FIRST + 18 + private const val CACHE_KEY = FIRST + 19 + private const val ERROR = FIRST + 20 // Encode certain known metadata keys as single byte strings to save space private val knownMetadataKeys = mapOf(