diff --git a/CHANGELOG.md b/CHANGELOG.md index f6d26924..4a2ea130 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,10 @@ # Next version (unreleased) -PUT_CHANGELOG_HERE +- Storage binary format is changed to be a bit more compact +- Add `ApolloStore.trim()` to remove old data from the cache +- `CacheKey` is used in more APIs instead of `String`, for consistency. +- `ApolloCacheHeaders.EVICT_AFTER_READ` is removed. `ApolloStore.remove()` can be used instead. +- `NormalizedCache.remove(pattern: String)` is removed. Please open an issue if you need this feature back. # Version 0.0.7 _2025-03-03_ diff --git a/normalized-cache-incubating/api/normalized-cache-incubating.api b/normalized-cache-incubating/api/normalized-cache-incubating.api index 4e6d4260..12312a9a 100644 --- a/normalized-cache-incubating/api/normalized-cache-incubating.api +++ b/normalized-cache-incubating/api/normalized-cache-incubating.api @@ -5,18 +5,19 @@ public abstract interface class com/apollographql/cache/normalized/ApolloStore { public abstract fun dispose ()V public abstract fun dump ()Ljava/util/Map; public abstract fun getChangedKeys ()Lkotlinx/coroutines/flow/SharedFlow; - public abstract fun normalize (Lcom/apollographql/apollo/api/Executable;Ljava/util/Map;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;)Ljava/util/Map; + public abstract fun normalize-niOPdRo (Lcom/apollographql/apollo/api/Executable;Ljava/util/Map;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;)Ljava/util/Map; public abstract fun publish (Ljava/util/Set;Lkotlin/coroutines/Continuation;)Ljava/lang/Object; - public abstract fun readFragment (Lcom/apollographql/apollo/api/Fragment;Lcom/apollographql/cache/normalized/api/CacheKey;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/ApolloStore$ReadResult; + public abstract fun readFragment-dEpVOtE 
(Lcom/apollographql/apollo/api/Fragment;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/ApolloStore$ReadResult; public abstract fun readOperation (Lcom/apollographql/apollo/api/Operation;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/apollo/api/ApolloResponse; - public abstract fun remove (Lcom/apollographql/cache/normalized/api/CacheKey;Z)Z public abstract fun remove (Ljava/util/List;Z)I + public abstract fun remove-eNSUWrY (Ljava/lang/String;Z)Z public abstract fun rollbackOptimisticUpdates (Ljava/util/UUID;)Ljava/util/Set; - public abstract fun writeFragment (Lcom/apollographql/apollo/api/Fragment;Lcom/apollographql/cache/normalized/api/CacheKey;Lcom/apollographql/apollo/api/Fragment$Data;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Set; + public abstract fun trim (JF)J + public abstract fun writeFragment-1qdIjGk (Lcom/apollographql/apollo/api/Fragment;Ljava/lang/String;Lcom/apollographql/apollo/api/Fragment$Data;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Set; public abstract fun writeOperation (Lcom/apollographql/apollo/api/Operation;Lcom/apollographql/apollo/api/Operation$Data;Ljava/util/List;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Set; public abstract fun writeOperation (Lcom/apollographql/apollo/api/Operation;Ljava/util/Map;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Set; - public abstract fun writeOptimisticUpdates 
(Lcom/apollographql/apollo/api/Fragment;Lcom/apollographql/cache/normalized/api/CacheKey;Lcom/apollographql/apollo/api/Fragment$Data;Ljava/util/UUID;Lcom/apollographql/apollo/api/CustomScalarAdapters;)Ljava/util/Set; public abstract fun writeOptimisticUpdates (Lcom/apollographql/apollo/api/Operation;Lcom/apollographql/apollo/api/Operation$Data;Ljava/util/UUID;Lcom/apollographql/apollo/api/CustomScalarAdapters;)Ljava/util/Set; + public abstract fun writeOptimisticUpdates-1qdIjGk (Lcom/apollographql/apollo/api/Fragment;Ljava/lang/String;Lcom/apollographql/apollo/api/Fragment$Data;Ljava/util/UUID;Lcom/apollographql/apollo/api/CustomScalarAdapters;)Ljava/util/Set; } public final class com/apollographql/cache/normalized/ApolloStore$Companion { @@ -24,16 +25,17 @@ public final class com/apollographql/cache/normalized/ApolloStore$Companion { } public final class com/apollographql/cache/normalized/ApolloStore$DefaultImpls { - public static synthetic fun normalize$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Executable;Ljava/util/Map;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;ILjava/lang/Object;)Ljava/util/Map; - public static synthetic fun readFragment$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Fragment;Lcom/apollographql/cache/normalized/api/CacheKey;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Lcom/apollographql/cache/normalized/ApolloStore$ReadResult; + public static synthetic fun normalize-niOPdRo$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Executable;Ljava/util/Map;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;ILjava/lang/Object;)Ljava/util/Map; + public static synthetic fun readFragment-dEpVOtE$default 
(Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Fragment;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Lcom/apollographql/cache/normalized/ApolloStore$ReadResult; public static synthetic fun readOperation$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Operation;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Lcom/apollographql/apollo/api/ApolloResponse; - public static synthetic fun remove$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/cache/normalized/api/CacheKey;ZILjava/lang/Object;)Z public static synthetic fun remove$default (Lcom/apollographql/cache/normalized/ApolloStore;Ljava/util/List;ZILjava/lang/Object;)I - public static synthetic fun writeFragment$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Fragment;Lcom/apollographql/cache/normalized/api/CacheKey;Lcom/apollographql/apollo/api/Fragment$Data;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Ljava/util/Set; + public static synthetic fun remove-eNSUWrY$default (Lcom/apollographql/cache/normalized/ApolloStore;Ljava/lang/String;ZILjava/lang/Object;)Z + public static synthetic fun trim$default (Lcom/apollographql/cache/normalized/ApolloStore;JFILjava/lang/Object;)J + public static synthetic fun writeFragment-1qdIjGk$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Fragment;Ljava/lang/String;Lcom/apollographql/apollo/api/Fragment$Data;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Ljava/util/Set; public static synthetic fun writeOperation$default 
(Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Operation;Lcom/apollographql/apollo/api/Operation$Data;Ljava/util/List;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Ljava/util/Set; public static synthetic fun writeOperation$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Operation;Ljava/util/Map;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheHeaders;ILjava/lang/Object;)Ljava/util/Set; - public static synthetic fun writeOptimisticUpdates$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Fragment;Lcom/apollographql/cache/normalized/api/CacheKey;Lcom/apollographql/apollo/api/Fragment$Data;Ljava/util/UUID;Lcom/apollographql/apollo/api/CustomScalarAdapters;ILjava/lang/Object;)Ljava/util/Set; public static synthetic fun writeOptimisticUpdates$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Operation;Lcom/apollographql/apollo/api/Operation$Data;Ljava/util/UUID;Lcom/apollographql/apollo/api/CustomScalarAdapters;ILjava/lang/Object;)Ljava/util/Set; + public static synthetic fun writeOptimisticUpdates-1qdIjGk$default (Lcom/apollographql/cache/normalized/ApolloStore;Lcom/apollographql/apollo/api/Fragment;Ljava/lang/String;Lcom/apollographql/apollo/api/Fragment$Data;Ljava/util/UUID;Lcom/apollographql/apollo/api/CustomScalarAdapters;ILjava/lang/Object;)Ljava/util/Set; } public final class com/apollographql/cache/normalized/ApolloStore$ReadResult { @@ -227,27 +229,28 @@ public final class com/apollographql/cache/normalized/api/CacheHeaders$Companion public final class com/apollographql/cache/normalized/api/CacheKey { public static final field Companion Lcom/apollographql/cache/normalized/api/CacheKey$Companion; - public fun (Ljava/lang/String;)V - public fun (Ljava/lang/String;Ljava/util/List;)V - public fun 
(Ljava/lang/String;[Ljava/lang/String;)V - public static final fun canDeserialize (Ljava/lang/String;)Z - public static final fun deserialize (Ljava/lang/String;)Lcom/apollographql/cache/normalized/api/CacheKey; + public static final synthetic fun box-impl (Ljava/lang/String;)Lcom/apollographql/cache/normalized/api/CacheKey; + public static fun constructor-impl (Ljava/lang/String;)Ljava/lang/String; + public static fun constructor-impl (Ljava/lang/String;Ljava/util/List;)Ljava/lang/String; + public static fun constructor-impl (Ljava/lang/String;[Ljava/lang/String;)Ljava/lang/String; public fun equals (Ljava/lang/Object;)Z + public static fun equals-impl (Ljava/lang/String;Ljava/lang/Object;)Z + public static final fun equals-impl0 (Ljava/lang/String;Ljava/lang/String;)Z public final fun getKey ()Ljava/lang/String; public fun hashCode ()I - public static final fun rootKey ()Lcom/apollographql/cache/normalized/api/CacheKey; - public final fun serialize ()Ljava/lang/String; + public static fun hashCode-impl (Ljava/lang/String;)I + public static final fun rootKey-mqw0cJ0 ()Ljava/lang/String; public fun toString ()Ljava/lang/String; + public static fun toString-impl (Ljava/lang/String;)Ljava/lang/String; + public final synthetic fun unbox-impl ()Ljava/lang/String; } public final class com/apollographql/cache/normalized/api/CacheKey$Companion { - public final fun canDeserialize (Ljava/lang/String;)Z - public final fun deserialize (Ljava/lang/String;)Lcom/apollographql/cache/normalized/api/CacheKey; - public final fun rootKey ()Lcom/apollographql/cache/normalized/api/CacheKey; + public final fun rootKey-mqw0cJ0 ()Ljava/lang/String; } public abstract interface class com/apollographql/cache/normalized/api/CacheKeyGenerator { - public abstract fun cacheKeyForObject (Ljava/util/Map;Lcom/apollographql/cache/normalized/api/CacheKeyGeneratorContext;)Lcom/apollographql/cache/normalized/api/CacheKey; + public abstract fun cacheKeyForObject-z2_y8R0 
(Ljava/util/Map;Lcom/apollographql/cache/normalized/api/CacheKeyGeneratorContext;)Ljava/lang/String; } public final class com/apollographql/cache/normalized/api/CacheKeyGeneratorContext { @@ -256,9 +259,13 @@ public final class com/apollographql/cache/normalized/api/CacheKeyGeneratorConte public final fun getVariables ()Lcom/apollographql/apollo/api/Executable$Variables; } +public final class com/apollographql/cache/normalized/api/CacheKeyKt { + public static final fun isRootKey-pWl1Des (Ljava/lang/String;)Z +} + public abstract class com/apollographql/cache/normalized/api/CacheKeyResolver : com/apollographql/cache/normalized/api/CacheResolver { public fun ()V - public abstract fun cacheKeyForField (Lcom/apollographql/cache/normalized/api/ResolverContext;)Lcom/apollographql/cache/normalized/api/CacheKey; + public abstract fun cacheKeyForField-fLoEQYY (Lcom/apollographql/cache/normalized/api/ResolverContext;)Ljava/lang/String; public fun listOfCacheKeysForField (Lcom/apollographql/cache/normalized/api/ResolverContext;)Ljava/util/List; public final fun resolveField (Lcom/apollographql/cache/normalized/api/ResolverContext;)Ljava/lang/Object; } @@ -386,14 +393,14 @@ public final class com/apollographql/cache/normalized/api/IdCacheKeyGenerator : public fun ()V public fun ([Ljava/lang/String;)V public synthetic fun ([Ljava/lang/String;ILkotlin/jvm/internal/DefaultConstructorMarker;)V - public fun cacheKeyForObject (Ljava/util/Map;Lcom/apollographql/cache/normalized/api/CacheKeyGeneratorContext;)Lcom/apollographql/cache/normalized/api/CacheKey; + public fun cacheKeyForObject-z2_y8R0 (Ljava/util/Map;Lcom/apollographql/cache/normalized/api/CacheKeyGeneratorContext;)Ljava/lang/String; } public final class com/apollographql/cache/normalized/api/IdCacheKeyResolver : com/apollographql/cache/normalized/api/CacheKeyResolver { public fun ()V public fun (Ljava/util/List;Ljava/util/List;)V public synthetic fun 
(Ljava/util/List;Ljava/util/List;ILkotlin/jvm/internal/DefaultConstructorMarker;)V - public fun cacheKeyForField (Lcom/apollographql/cache/normalized/api/ResolverContext;)Lcom/apollographql/cache/normalized/api/CacheKey; + public fun cacheKeyForField-fLoEQYY (Lcom/apollographql/cache/normalized/api/ResolverContext;)Ljava/lang/String; public fun listOfCacheKeysForField (Lcom/apollographql/cache/normalized/api/ResolverContext;)Ljava/util/List; } @@ -446,15 +453,20 @@ public abstract interface class com/apollographql/cache/normalized/api/Normalize public abstract fun merge (Lcom/apollographql/cache/normalized/api/Record;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; public abstract fun merge (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; public static fun prettifyDump (Ljava/util/Map;)Ljava/lang/String; - public abstract fun remove (Lcom/apollographql/cache/normalized/api/CacheKey;Z)Z - public abstract fun remove (Ljava/lang/String;)I public abstract fun remove (Ljava/util/Collection;Z)I + public abstract fun remove-eNSUWrY (Ljava/lang/String;Z)Z + public abstract fun trim (JF)J } public final class com/apollographql/cache/normalized/api/NormalizedCache$Companion { public final fun prettifyDump (Ljava/util/Map;)Ljava/lang/String; } +public final class com/apollographql/cache/normalized/api/NormalizedCache$DefaultImpls { + public static fun trim (Lcom/apollographql/cache/normalized/api/NormalizedCache;JF)J + public static synthetic fun trim$default (Lcom/apollographql/cache/normalized/api/NormalizedCache;JFILjava/lang/Object;)J +} + public abstract class com/apollographql/cache/normalized/api/NormalizedCacheFactory { public fun ()V public abstract fun create ()Lcom/apollographql/cache/normalized/api/NormalizedCache; @@ -462,14 +474,14 @@ public abstract class 
com/apollographql/cache/normalized/api/NormalizedCacheFact public abstract interface class com/apollographql/cache/normalized/api/ReadOnlyNormalizedCache { public abstract fun dump ()Ljava/util/Map; - public abstract fun loadRecord (Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; + public abstract fun loadRecord-eNSUWrY (Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; public abstract fun loadRecords (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Collection; } public final class com/apollographql/cache/normalized/api/Record : java/util/Map, kotlin/jvm/internal/markers/KMappedMarker { public static final field Companion Lcom/apollographql/cache/normalized/api/Record$Companion; - public fun (Ljava/lang/String;Ljava/util/Map;Ljava/util/UUID;Ljava/util/Map;)V public synthetic fun (Ljava/lang/String;Ljava/util/Map;Ljava/util/UUID;Ljava/util/Map;ILkotlin/jvm/internal/DefaultConstructorMarker;)V + public synthetic fun (Ljava/lang/String;Ljava/util/Map;Ljava/util/UUID;Ljava/util/Map;Lkotlin/jvm/internal/DefaultConstructorMarker;)V public fun clear ()V public synthetic fun compute (Ljava/lang/Object;Ljava/util/function/BiFunction;)Ljava/lang/Object; public fun compute (Ljava/lang/String;Ljava/util/function/BiFunction;)Ljava/lang/Object; @@ -486,7 +498,7 @@ public final class com/apollographql/cache/normalized/api/Record : java/util/Map public fun get (Ljava/lang/String;)Ljava/lang/Object; public fun getEntries ()Ljava/util/Set; public final fun getFields ()Ljava/util/Map; - public final fun getKey ()Ljava/lang/String; + public final fun getKey-mqw0cJ0 ()Ljava/lang/String; public fun getKeys ()Ljava/util/Set; public final fun getMetadata ()Ljava/util/Map; public final fun getMutationId ()Ljava/util/UUID; @@ -541,12 +553,12 @@ public final class 
com/apollographql/cache/normalized/api/RecordMergerKt { } public final class com/apollographql/cache/normalized/api/ResolverContext { - public fun (Lcom/apollographql/apollo/api/CompiledField;Lcom/apollographql/apollo/api/Executable$Variables;Ljava/util/Map;Ljava/lang/String;Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Ljava/util/List;)V + public synthetic fun (Lcom/apollographql/apollo/api/CompiledField;Lcom/apollographql/apollo/api/Executable$Variables;Ljava/util/Map;Ljava/lang/String;Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Ljava/util/List;Lkotlin/jvm/internal/DefaultConstructorMarker;)V public final fun getCacheHeaders ()Lcom/apollographql/cache/normalized/api/CacheHeaders; public final fun getField ()Lcom/apollographql/apollo/api/CompiledField; public final fun getFieldKeyGenerator ()Lcom/apollographql/cache/normalized/api/FieldKeyGenerator; public final fun getParent ()Ljava/util/Map; - public final fun getParentKey ()Ljava/lang/String; + public final fun getParentKey-mqw0cJ0 ()Ljava/lang/String; public final fun getParentType ()Ljava/lang/String; public final fun getPath ()Ljava/util/List; public final fun getVariables ()Lcom/apollographql/apollo/api/Executable$Variables; @@ -559,14 +571,14 @@ public final class com/apollographql/cache/normalized/api/SchemaCoordinatesMaxAg public final class com/apollographql/cache/normalized/api/TypePolicyCacheKeyGenerator : com/apollographql/cache/normalized/api/CacheKeyGenerator { public static final field INSTANCE Lcom/apollographql/cache/normalized/api/TypePolicyCacheKeyGenerator; - public fun cacheKeyForObject (Ljava/util/Map;Lcom/apollographql/cache/normalized/api/CacheKeyGeneratorContext;)Lcom/apollographql/cache/normalized/api/CacheKey; + public fun cacheKeyForObject-z2_y8R0 
(Ljava/util/Map;Lcom/apollographql/cache/normalized/api/CacheKeyGeneratorContext;)Ljava/lang/String; } public final class com/apollographql/cache/normalized/internal/NormalizerKt { - public static final fun normalized (Lcom/apollographql/apollo/api/Executable$Data;Lcom/apollographql/apollo/api/Executable;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheKeyGenerator;Lcom/apollographql/cache/normalized/api/MetadataGenerator;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Lcom/apollographql/cache/normalized/api/EmbeddedFieldsProvider;)Ljava/util/Map; - public static final fun normalized (Ljava/util/Map;Lcom/apollographql/apollo/api/Executable;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheKeyGenerator;Lcom/apollographql/cache/normalized/api/MetadataGenerator;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Lcom/apollographql/cache/normalized/api/EmbeddedFieldsProvider;)Ljava/util/Map; - public static synthetic fun normalized$default (Lcom/apollographql/apollo/api/Executable$Data;Lcom/apollographql/apollo/api/Executable;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheKeyGenerator;Lcom/apollographql/cache/normalized/api/MetadataGenerator;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Lcom/apollographql/cache/normalized/api/EmbeddedFieldsProvider;ILjava/lang/Object;)Ljava/util/Map; - public static synthetic fun normalized$default (Ljava/util/Map;Lcom/apollographql/apollo/api/Executable;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheKeyGenerator;Lcom/apollographql/cache/normalized/api/MetadataGenerator;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Lcom/apollographql/cache/normalized/api/EmbeddedFieldsProvider;ILjava/lang/Object;)Ljava/util/Map; + public static final fun 
normalized-MplSeLY (Lcom/apollographql/apollo/api/Executable$Data;Lcom/apollographql/apollo/api/Executable;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheKeyGenerator;Lcom/apollographql/cache/normalized/api/MetadataGenerator;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Lcom/apollographql/cache/normalized/api/EmbeddedFieldsProvider;)Ljava/util/Map; + public static final fun normalized-MplSeLY (Ljava/util/Map;Lcom/apollographql/apollo/api/Executable;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheKeyGenerator;Lcom/apollographql/cache/normalized/api/MetadataGenerator;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Lcom/apollographql/cache/normalized/api/EmbeddedFieldsProvider;)Ljava/util/Map; + public static synthetic fun normalized-MplSeLY$default (Lcom/apollographql/apollo/api/Executable$Data;Lcom/apollographql/apollo/api/Executable;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheKeyGenerator;Lcom/apollographql/cache/normalized/api/MetadataGenerator;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Lcom/apollographql/cache/normalized/api/EmbeddedFieldsProvider;ILjava/lang/Object;)Ljava/util/Map; + public static synthetic fun normalized-MplSeLY$default (Ljava/util/Map;Lcom/apollographql/apollo/api/Executable;Ljava/lang/String;Lcom/apollographql/apollo/api/CustomScalarAdapters;Lcom/apollographql/cache/normalized/api/CacheKeyGenerator;Lcom/apollographql/cache/normalized/api/MetadataGenerator;Lcom/apollographql/cache/normalized/api/FieldKeyGenerator;Lcom/apollographql/cache/normalized/api/EmbeddedFieldsProvider;ILjava/lang/Object;)Ljava/util/Map; } public final class com/apollographql/cache/normalized/memory/MemoryCache : com/apollographql/cache/normalized/api/NormalizedCache { @@ -576,13 +588,13 @@ public final class 
com/apollographql/cache/normalized/memory/MemoryCache : com/a public fun clearAll ()V public fun dump ()Ljava/util/Map; public final fun getSize ()I - public fun loadRecord (Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; + public fun loadRecord-eNSUWrY (Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; public fun loadRecords (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Collection; public fun merge (Lcom/apollographql/cache/normalized/api/Record;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; public fun merge (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; - public fun remove (Lcom/apollographql/cache/normalized/api/CacheKey;Z)Z - public fun remove (Ljava/lang/String;)I public fun remove (Ljava/util/Collection;Z)I + public fun remove-eNSUWrY (Ljava/lang/String;Z)Z + public fun trim (JF)J } public final class com/apollographql/cache/normalized/memory/MemoryCacheFactory : com/apollographql/cache/normalized/api/NormalizedCacheFactory { diff --git a/normalized-cache-incubating/api/normalized-cache-incubating.klib.api b/normalized-cache-incubating/api/normalized-cache-incubating.klib.api index d11ce69c..f4822de5 100644 --- a/normalized-cache-incubating/api/normalized-cache-incubating.klib.api +++ b/normalized-cache-incubating/api/normalized-cache-incubating.klib.api @@ -59,17 +59,17 @@ abstract interface com.apollographql.cache.normalized.api/NormalizedCache : com. 
abstract fun merge(kotlin.collections/Collection, com.apollographql.cache.normalized.api/CacheHeaders, com.apollographql.cache.normalized.api/RecordMerger): kotlin.collections/Set // com.apollographql.cache.normalized.api/NormalizedCache.merge|merge(kotlin.collections.Collection;com.apollographql.cache.normalized.api.CacheHeaders;com.apollographql.cache.normalized.api.RecordMerger){}[0] abstract fun remove(com.apollographql.cache.normalized.api/CacheKey, kotlin/Boolean): kotlin/Boolean // com.apollographql.cache.normalized.api/NormalizedCache.remove|remove(com.apollographql.cache.normalized.api.CacheKey;kotlin.Boolean){}[0] abstract fun remove(kotlin.collections/Collection, kotlin/Boolean): kotlin/Int // com.apollographql.cache.normalized.api/NormalizedCache.remove|remove(kotlin.collections.Collection;kotlin.Boolean){}[0] - abstract fun remove(kotlin/String): kotlin/Int // com.apollographql.cache.normalized.api/NormalizedCache.remove|remove(kotlin.String){}[0] + open fun trim(kotlin/Long, kotlin/Float = ...): kotlin/Long // com.apollographql.cache.normalized.api/NormalizedCache.trim|trim(kotlin.Long;kotlin.Float){}[0] final object Companion { // com.apollographql.cache.normalized.api/NormalizedCache.Companion|null[0] - final fun prettifyDump(kotlin.collections/Map, kotlin.collections/Map>): kotlin/String // com.apollographql.cache.normalized.api/NormalizedCache.Companion.prettifyDump|prettifyDump(kotlin.collections.Map,kotlin.collections.Map>){}[0] + final fun prettifyDump(kotlin.collections/Map, kotlin.collections/Map>): kotlin/String // com.apollographql.cache.normalized.api/NormalizedCache.Companion.prettifyDump|prettifyDump(kotlin.collections.Map,kotlin.collections.Map>){}[0] } } abstract interface com.apollographql.cache.normalized.api/ReadOnlyNormalizedCache { // com.apollographql.cache.normalized.api/ReadOnlyNormalizedCache|null[0] - abstract fun dump(): kotlin.collections/Map, kotlin.collections/Map> // 
com.apollographql.cache.normalized.api/ReadOnlyNormalizedCache.dump|dump(){}[0] - abstract fun loadRecord(kotlin/String, com.apollographql.cache.normalized.api/CacheHeaders): com.apollographql.cache.normalized.api/Record? // com.apollographql.cache.normalized.api/ReadOnlyNormalizedCache.loadRecord|loadRecord(kotlin.String;com.apollographql.cache.normalized.api.CacheHeaders){}[0] - abstract fun loadRecords(kotlin.collections/Collection, com.apollographql.cache.normalized.api/CacheHeaders): kotlin.collections/Collection // com.apollographql.cache.normalized.api/ReadOnlyNormalizedCache.loadRecords|loadRecords(kotlin.collections.Collection;com.apollographql.cache.normalized.api.CacheHeaders){}[0] + abstract fun dump(): kotlin.collections/Map, kotlin.collections/Map> // com.apollographql.cache.normalized.api/ReadOnlyNormalizedCache.dump|dump(){}[0] + abstract fun loadRecord(com.apollographql.cache.normalized.api/CacheKey, com.apollographql.cache.normalized.api/CacheHeaders): com.apollographql.cache.normalized.api/Record? 
// com.apollographql.cache.normalized.api/ReadOnlyNormalizedCache.loadRecord|loadRecord(com.apollographql.cache.normalized.api.CacheKey;com.apollographql.cache.normalized.api.CacheHeaders){}[0] + abstract fun loadRecords(kotlin.collections/Collection, com.apollographql.cache.normalized.api/CacheHeaders): kotlin.collections/Collection // com.apollographql.cache.normalized.api/ReadOnlyNormalizedCache.loadRecords|loadRecords(kotlin.collections.Collection;com.apollographql.cache.normalized.api.CacheHeaders){}[0] } abstract interface com.apollographql.cache.normalized.api/RecordMerger { // com.apollographql.cache.normalized.api/RecordMerger|null[0] @@ -80,7 +80,7 @@ abstract interface com.apollographql.cache.normalized/ApolloStore { // com.apoll abstract val changedKeys // com.apollographql.cache.normalized/ApolloStore.changedKeys|{}changedKeys[0] abstract fun (): kotlinx.coroutines.flow/SharedFlow> // com.apollographql.cache.normalized/ApolloStore.changedKeys.|(){}[0] - abstract fun <#A1: com.apollographql.apollo.api/Executable.Data> normalize(com.apollographql.apollo.api/Executable<#A1>, kotlin.collections/Map, kotlin/String = ..., com.apollographql.apollo.api/CustomScalarAdapters = ...): kotlin.collections/Map // com.apollographql.cache.normalized/ApolloStore.normalize|normalize(com.apollographql.apollo.api.Executable<0:0>;kotlin.collections.Map;kotlin.String;com.apollographql.apollo.api.CustomScalarAdapters){0§}[0] + abstract fun <#A1: com.apollographql.apollo.api/Executable.Data> normalize(com.apollographql.apollo.api/Executable<#A1>, kotlin.collections/Map, com.apollographql.cache.normalized.api/CacheKey = ..., com.apollographql.apollo.api/CustomScalarAdapters = ...): kotlin.collections/Map // com.apollographql.cache.normalized/ApolloStore.normalize|normalize(com.apollographql.apollo.api.Executable<0:0>;kotlin.collections.Map;com.apollographql.cache.normalized.api.CacheKey;com.apollographql.apollo.api.CustomScalarAdapters){0§}[0] abstract fun <#A1: 
com.apollographql.apollo.api/Fragment.Data> readFragment(com.apollographql.apollo.api/Fragment<#A1>, com.apollographql.cache.normalized.api/CacheKey, com.apollographql.apollo.api/CustomScalarAdapters = ..., com.apollographql.cache.normalized.api/CacheHeaders = ...): com.apollographql.cache.normalized/ApolloStore.ReadResult<#A1> // com.apollographql.cache.normalized/ApolloStore.readFragment|readFragment(com.apollographql.apollo.api.Fragment<0:0>;com.apollographql.cache.normalized.api.CacheKey;com.apollographql.apollo.api.CustomScalarAdapters;com.apollographql.cache.normalized.api.CacheHeaders){0§}[0] abstract fun <#A1: com.apollographql.apollo.api/Fragment.Data> writeFragment(com.apollographql.apollo.api/Fragment<#A1>, com.apollographql.cache.normalized.api/CacheKey, #A1, com.apollographql.apollo.api/CustomScalarAdapters = ..., com.apollographql.cache.normalized.api/CacheHeaders = ...): kotlin.collections/Set // com.apollographql.cache.normalized/ApolloStore.writeFragment|writeFragment(com.apollographql.apollo.api.Fragment<0:0>;com.apollographql.cache.normalized.api.CacheKey;0:0;com.apollographql.apollo.api.CustomScalarAdapters;com.apollographql.cache.normalized.api.CacheHeaders){0§}[0] abstract fun <#A1: com.apollographql.apollo.api/Fragment.Data> writeOptimisticUpdates(com.apollographql.apollo.api/Fragment<#A1>, com.apollographql.cache.normalized.api/CacheKey, #A1, com.benasher44.uuid/Uuid, com.apollographql.apollo.api/CustomScalarAdapters = ...): kotlin.collections/Set // com.apollographql.cache.normalized/ApolloStore.writeOptimisticUpdates|writeOptimisticUpdates(com.apollographql.apollo.api.Fragment<0:0>;com.apollographql.cache.normalized.api.CacheKey;0:0;com.benasher44.uuid.Uuid;com.apollographql.apollo.api.CustomScalarAdapters){0§}[0] @@ -91,10 +91,11 @@ abstract interface com.apollographql.cache.normalized/ApolloStore { // com.apoll abstract fun <#A1: kotlin/Any?> accessCache(kotlin/Function1): #A1 // 
com.apollographql.cache.normalized/ApolloStore.accessCache|accessCache(kotlin.Function1){0§}[0] abstract fun clearAll(): kotlin/Boolean // com.apollographql.cache.normalized/ApolloStore.clearAll|clearAll(){}[0] abstract fun dispose() // com.apollographql.cache.normalized/ApolloStore.dispose|dispose(){}[0] - abstract fun dump(): kotlin.collections/Map, kotlin.collections/Map> // com.apollographql.cache.normalized/ApolloStore.dump|dump(){}[0] + abstract fun dump(): kotlin.collections/Map, kotlin.collections/Map> // com.apollographql.cache.normalized/ApolloStore.dump|dump(){}[0] abstract fun remove(com.apollographql.cache.normalized.api/CacheKey, kotlin/Boolean = ...): kotlin/Boolean // com.apollographql.cache.normalized/ApolloStore.remove|remove(com.apollographql.cache.normalized.api.CacheKey;kotlin.Boolean){}[0] abstract fun remove(kotlin.collections/List, kotlin/Boolean = ...): kotlin/Int // com.apollographql.cache.normalized/ApolloStore.remove|remove(kotlin.collections.List;kotlin.Boolean){}[0] abstract fun rollbackOptimisticUpdates(com.benasher44.uuid/Uuid): kotlin.collections/Set // com.apollographql.cache.normalized/ApolloStore.rollbackOptimisticUpdates|rollbackOptimisticUpdates(com.benasher44.uuid.Uuid){}[0] + abstract fun trim(kotlin/Long, kotlin/Float = ...): kotlin/Long // com.apollographql.cache.normalized/ApolloStore.trim|trim(kotlin.Long;kotlin.Float){}[0] abstract suspend fun publish(kotlin.collections/Set) // com.apollographql.cache.normalized/ApolloStore.publish|publish(kotlin.collections.Set){}[0] final class <#A1: com.apollographql.apollo.api/Executable.Data> ReadResult { // com.apollographql.cache.normalized/ApolloStore.ReadResult|null[0] @@ -170,26 +171,6 @@ final class com.apollographql.cache.normalized.api/CacheHeaders { // com.apollog } } -final class com.apollographql.cache.normalized.api/CacheKey { // com.apollographql.cache.normalized.api/CacheKey|null[0] - constructor (kotlin/String) // 
com.apollographql.cache.normalized.api/CacheKey.|(kotlin.String){}[0] - constructor (kotlin/String, kotlin.collections/List) // com.apollographql.cache.normalized.api/CacheKey.|(kotlin.String;kotlin.collections.List){}[0] - constructor (kotlin/String, kotlin/Array...) // com.apollographql.cache.normalized.api/CacheKey.|(kotlin.String;kotlin.Array...){}[0] - - final val key // com.apollographql.cache.normalized.api/CacheKey.key|{}key[0] - final fun (): kotlin/String // com.apollographql.cache.normalized.api/CacheKey.key.|(){}[0] - - final fun equals(kotlin/Any?): kotlin/Boolean // com.apollographql.cache.normalized.api/CacheKey.equals|equals(kotlin.Any?){}[0] - final fun hashCode(): kotlin/Int // com.apollographql.cache.normalized.api/CacheKey.hashCode|hashCode(){}[0] - final fun serialize(): kotlin/String // com.apollographql.cache.normalized.api/CacheKey.serialize|serialize(){}[0] - final fun toString(): kotlin/String // com.apollographql.cache.normalized.api/CacheKey.toString|toString(){}[0] - - final object Companion { // com.apollographql.cache.normalized.api/CacheKey.Companion|null[0] - final fun canDeserialize(kotlin/String): kotlin/Boolean // com.apollographql.cache.normalized.api/CacheKey.Companion.canDeserialize|canDeserialize(kotlin.String){}[0] - final fun deserialize(kotlin/String): com.apollographql.cache.normalized.api/CacheKey // com.apollographql.cache.normalized.api/CacheKey.Companion.deserialize|deserialize(kotlin.String){}[0] - final fun rootKey(): com.apollographql.cache.normalized.api/CacheKey // com.apollographql.cache.normalized.api/CacheKey.Companion.rootKey|rootKey(){}[0] - } -} - final class com.apollographql.cache.normalized.api/CacheKeyGeneratorContext { // com.apollographql.cache.normalized.api/CacheKeyGeneratorContext|null[0] constructor (com.apollographql.apollo.api/CompiledField, com.apollographql.apollo.api/Executable.Variables) // 
com.apollographql.cache.normalized.api/CacheKeyGeneratorContext.|(com.apollographql.apollo.api.CompiledField;com.apollographql.apollo.api.Executable.Variables){}[0] @@ -315,14 +296,14 @@ final class com.apollographql.cache.normalized.api/MetadataGeneratorContext { // } final class com.apollographql.cache.normalized.api/Record : kotlin.collections/Map { // com.apollographql.cache.normalized.api/Record|null[0] - constructor (kotlin/String, kotlin.collections/Map, com.benasher44.uuid/Uuid? = ..., kotlin.collections/Map> = ...) // com.apollographql.cache.normalized.api/Record.|(kotlin.String;kotlin.collections.Map;com.benasher44.uuid.Uuid?;kotlin.collections.Map>){}[0] + constructor (com.apollographql.cache.normalized.api/CacheKey, kotlin.collections/Map, com.benasher44.uuid/Uuid? = ..., kotlin.collections/Map> = ...) // com.apollographql.cache.normalized.api/Record.|(com.apollographql.cache.normalized.api.CacheKey;kotlin.collections.Map;com.benasher44.uuid.Uuid?;kotlin.collections.Map>){}[0] final val entries // com.apollographql.cache.normalized.api/Record.entries|{}entries[0] final fun (): kotlin.collections/Set> // com.apollographql.cache.normalized.api/Record.entries.|(){}[0] final val fields // com.apollographql.cache.normalized.api/Record.fields|{}fields[0] final fun (): kotlin.collections/Map // com.apollographql.cache.normalized.api/Record.fields.|(){}[0] final val key // com.apollographql.cache.normalized.api/Record.key|{}key[0] - final fun (): kotlin/String // com.apollographql.cache.normalized.api/Record.key.|(){}[0] + final fun (): com.apollographql.cache.normalized.api/CacheKey // com.apollographql.cache.normalized.api/Record.key.|(){}[0] final val keys // com.apollographql.cache.normalized.api/Record.keys|{}keys[0] final fun (): kotlin.collections/Set // com.apollographql.cache.normalized.api/Record.keys.|(){}[0] final val metadata // com.apollographql.cache.normalized.api/Record.metadata|{}metadata[0] @@ -362,7 +343,7 @@ final class 
com.apollographql.cache.normalized.api/RecordMergerContext { // com. } final class com.apollographql.cache.normalized.api/ResolverContext { // com.apollographql.cache.normalized.api/ResolverContext|null[0] - constructor (com.apollographql.apollo.api/CompiledField, com.apollographql.apollo.api/Executable.Variables, kotlin.collections/Map, kotlin/String, kotlin/String, com.apollographql.cache.normalized.api/CacheHeaders, com.apollographql.cache.normalized.api/FieldKeyGenerator, kotlin.collections/List) // com.apollographql.cache.normalized.api/ResolverContext.|(com.apollographql.apollo.api.CompiledField;com.apollographql.apollo.api.Executable.Variables;kotlin.collections.Map;kotlin.String;kotlin.String;com.apollographql.cache.normalized.api.CacheHeaders;com.apollographql.cache.normalized.api.FieldKeyGenerator;kotlin.collections.List){}[0] + constructor (com.apollographql.apollo.api/CompiledField, com.apollographql.apollo.api/Executable.Variables, kotlin.collections/Map, com.apollographql.cache.normalized.api/CacheKey, kotlin/String, com.apollographql.cache.normalized.api/CacheHeaders, com.apollographql.cache.normalized.api/FieldKeyGenerator, kotlin.collections/List) // com.apollographql.cache.normalized.api/ResolverContext.|(com.apollographql.apollo.api.CompiledField;com.apollographql.apollo.api.Executable.Variables;kotlin.collections.Map;com.apollographql.cache.normalized.api.CacheKey;kotlin.String;com.apollographql.cache.normalized.api.CacheHeaders;com.apollographql.cache.normalized.api.FieldKeyGenerator;kotlin.collections.List){}[0] final val cacheHeaders // com.apollographql.cache.normalized.api/ResolverContext.cacheHeaders|{}cacheHeaders[0] final fun (): com.apollographql.cache.normalized.api/CacheHeaders // com.apollographql.cache.normalized.api/ResolverContext.cacheHeaders.|(){}[0] @@ -373,7 +354,7 @@ final class com.apollographql.cache.normalized.api/ResolverContext { // com.apol final val parent // 
com.apollographql.cache.normalized.api/ResolverContext.parent|{}parent[0] final fun (): kotlin.collections/Map // com.apollographql.cache.normalized.api/ResolverContext.parent.|(){}[0] final val parentKey // com.apollographql.cache.normalized.api/ResolverContext.parentKey|{}parentKey[0] - final fun (): kotlin/String // com.apollographql.cache.normalized.api/ResolverContext.parentKey.|(){}[0] + final fun (): com.apollographql.cache.normalized.api/CacheKey // com.apollographql.cache.normalized.api/ResolverContext.parentKey.|(){}[0] final val parentType // com.apollographql.cache.normalized.api/ResolverContext.parentType|{}parentType[0] final fun (): kotlin/String // com.apollographql.cache.normalized.api/ResolverContext.parentType.|(){}[0] final val path // com.apollographql.cache.normalized.api/ResolverContext.path|{}path[0] @@ -395,14 +376,14 @@ final class com.apollographql.cache.normalized.memory/MemoryCache : com.apollogr final fun (): kotlin/Int // com.apollographql.cache.normalized.memory/MemoryCache.size.|(){}[0] final fun clearAll() // com.apollographql.cache.normalized.memory/MemoryCache.clearAll|clearAll(){}[0] - final fun dump(): kotlin.collections/Map, kotlin.collections/Map> // com.apollographql.cache.normalized.memory/MemoryCache.dump|dump(){}[0] - final fun loadRecord(kotlin/String, com.apollographql.cache.normalized.api/CacheHeaders): com.apollographql.cache.normalized.api/Record? 
// com.apollographql.cache.normalized.memory/MemoryCache.loadRecord|loadRecord(kotlin.String;com.apollographql.cache.normalized.api.CacheHeaders){}[0] - final fun loadRecords(kotlin.collections/Collection, com.apollographql.cache.normalized.api/CacheHeaders): kotlin.collections/Collection // com.apollographql.cache.normalized.memory/MemoryCache.loadRecords|loadRecords(kotlin.collections.Collection;com.apollographql.cache.normalized.api.CacheHeaders){}[0] + final fun dump(): kotlin.collections/Map, kotlin.collections/Map> // com.apollographql.cache.normalized.memory/MemoryCache.dump|dump(){}[0] + final fun loadRecord(com.apollographql.cache.normalized.api/CacheKey, com.apollographql.cache.normalized.api/CacheHeaders): com.apollographql.cache.normalized.api/Record? // com.apollographql.cache.normalized.memory/MemoryCache.loadRecord|loadRecord(com.apollographql.cache.normalized.api.CacheKey;com.apollographql.cache.normalized.api.CacheHeaders){}[0] + final fun loadRecords(kotlin.collections/Collection, com.apollographql.cache.normalized.api/CacheHeaders): kotlin.collections/Collection // com.apollographql.cache.normalized.memory/MemoryCache.loadRecords|loadRecords(kotlin.collections.Collection;com.apollographql.cache.normalized.api.CacheHeaders){}[0] final fun merge(com.apollographql.cache.normalized.api/Record, com.apollographql.cache.normalized.api/CacheHeaders, com.apollographql.cache.normalized.api/RecordMerger): kotlin.collections/Set // com.apollographql.cache.normalized.memory/MemoryCache.merge|merge(com.apollographql.cache.normalized.api.Record;com.apollographql.cache.normalized.api.CacheHeaders;com.apollographql.cache.normalized.api.RecordMerger){}[0] final fun merge(kotlin.collections/Collection, com.apollographql.cache.normalized.api/CacheHeaders, com.apollographql.cache.normalized.api/RecordMerger): kotlin.collections/Set // 
com.apollographql.cache.normalized.memory/MemoryCache.merge|merge(kotlin.collections.Collection;com.apollographql.cache.normalized.api.CacheHeaders;com.apollographql.cache.normalized.api.RecordMerger){}[0] final fun remove(com.apollographql.cache.normalized.api/CacheKey, kotlin/Boolean): kotlin/Boolean // com.apollographql.cache.normalized.memory/MemoryCache.remove|remove(com.apollographql.cache.normalized.api.CacheKey;kotlin.Boolean){}[0] final fun remove(kotlin.collections/Collection, kotlin/Boolean): kotlin/Int // com.apollographql.cache.normalized.memory/MemoryCache.remove|remove(kotlin.collections.Collection;kotlin.Boolean){}[0] - final fun remove(kotlin/String): kotlin/Int // com.apollographql.cache.normalized.memory/MemoryCache.remove|remove(kotlin.String){}[0] + final fun trim(kotlin/Long, kotlin/Float): kotlin/Long // com.apollographql.cache.normalized.memory/MemoryCache.trim|trim(kotlin.Long;kotlin.Float){}[0] } final class com.apollographql.cache.normalized.memory/MemoryCacheFactory : com.apollographql.cache.normalized.api/NormalizedCacheFactory { // com.apollographql.cache.normalized.memory/MemoryCacheFactory|null[0] @@ -480,6 +461,23 @@ final class com.apollographql.cache.normalized/RemovedFieldsAndRecords { // com. final fun (): kotlin.collections/Set // com.apollographql.cache.normalized/RemovedFieldsAndRecords.removedRecords.|(){}[0] } +final value class com.apollographql.cache.normalized.api/CacheKey { // com.apollographql.cache.normalized.api/CacheKey|null[0] + constructor (kotlin/String) // com.apollographql.cache.normalized.api/CacheKey.|(kotlin.String){}[0] + constructor (kotlin/String, kotlin.collections/List) // com.apollographql.cache.normalized.api/CacheKey.|(kotlin.String;kotlin.collections.List){}[0] + constructor (kotlin/String, kotlin/Array...) 
// com.apollographql.cache.normalized.api/CacheKey.|(kotlin.String;kotlin.Array...){}[0] + + final val key // com.apollographql.cache.normalized.api/CacheKey.key|{}key[0] + final fun (): kotlin/String // com.apollographql.cache.normalized.api/CacheKey.key.|(){}[0] + + final fun equals(kotlin/Any?): kotlin/Boolean // com.apollographql.cache.normalized.api/CacheKey.equals|equals(kotlin.Any?){}[0] + final fun hashCode(): kotlin/Int // com.apollographql.cache.normalized.api/CacheKey.hashCode|hashCode(){}[0] + final fun toString(): kotlin/String // com.apollographql.cache.normalized.api/CacheKey.toString|toString(){}[0] + + final object Companion { // com.apollographql.cache.normalized.api/CacheKey.Companion|null[0] + final fun rootKey(): com.apollographql.cache.normalized.api/CacheKey // com.apollographql.cache.normalized.api/CacheKey.Companion.rootKey|rootKey(){}[0] + } +} + final object com.apollographql.cache.normalized.api/ApolloCacheHeaders { // com.apollographql.cache.normalized.api/ApolloCacheHeaders|null[0] final const val DO_NOT_STORE // com.apollographql.cache.normalized.api/ApolloCacheHeaders.DO_NOT_STORE|{}DO_NOT_STORE[0] final fun (): kotlin/String // com.apollographql.cache.normalized.api/ApolloCacheHeaders.DO_NOT_STORE.|(){}[0] @@ -558,7 +556,8 @@ final val com.apollographql.cache.normalized/isFromCache // com.apollographql.ca final fun (com.apollographql.apollo/ApolloClient.Builder).com.apollographql.cache.normalized/logCacheMisses(kotlin/Function1 = ...): com.apollographql.apollo/ApolloClient.Builder // com.apollographql.cache.normalized/logCacheMisses|logCacheMisses@com.apollographql.apollo.ApolloClient.Builder(kotlin.Function1){}[0] final fun (com.apollographql.apollo/ApolloClient.Builder).com.apollographql.cache.normalized/normalizedCache(com.apollographql.cache.normalized.api/NormalizedCacheFactory, com.apollographql.cache.normalized.api/CacheKeyGenerator = ..., com.apollographql.cache.normalized.api/MetadataGenerator = ..., 
com.apollographql.cache.normalized.api/CacheResolver = ..., com.apollographql.cache.normalized.api/RecordMerger = ..., com.apollographql.cache.normalized.api/FieldKeyGenerator = ..., com.apollographql.cache.normalized.api/EmbeddedFieldsProvider = ..., kotlin/Boolean = ...): com.apollographql.apollo/ApolloClient.Builder // com.apollographql.cache.normalized/normalizedCache|normalizedCache@com.apollographql.apollo.ApolloClient.Builder(com.apollographql.cache.normalized.api.NormalizedCacheFactory;com.apollographql.cache.normalized.api.CacheKeyGenerator;com.apollographql.cache.normalized.api.MetadataGenerator;com.apollographql.cache.normalized.api.CacheResolver;com.apollographql.cache.normalized.api.RecordMerger;com.apollographql.cache.normalized.api.FieldKeyGenerator;com.apollographql.cache.normalized.api.EmbeddedFieldsProvider;kotlin.Boolean){}[0] final fun (com.apollographql.apollo/ApolloClient.Builder).com.apollographql.cache.normalized/store(com.apollographql.cache.normalized/ApolloStore, kotlin/Boolean = ...): com.apollographql.apollo/ApolloClient.Builder // com.apollographql.cache.normalized/store|store@com.apollographql.apollo.ApolloClient.Builder(com.apollographql.cache.normalized.ApolloStore;kotlin.Boolean){}[0] -final fun (com.apollographql.cache.normalized.api/NormalizedCache).com.apollographql.cache.normalized/allRecords(): kotlin.collections/Map // com.apollographql.cache.normalized/allRecords|allRecords@com.apollographql.cache.normalized.api.NormalizedCache(){}[0] +final fun (com.apollographql.cache.normalized.api/CacheKey).com.apollographql.cache.normalized.api/isRootKey(): kotlin/Boolean // com.apollographql.cache.normalized.api/isRootKey|isRootKey@com.apollographql.cache.normalized.api.CacheKey(){}[0] +final fun (com.apollographql.cache.normalized.api/NormalizedCache).com.apollographql.cache.normalized/allRecords(): kotlin.collections/Map // 
com.apollographql.cache.normalized/allRecords|allRecords@com.apollographql.cache.normalized.api.NormalizedCache(){}[0] final fun (com.apollographql.cache.normalized.api/NormalizedCache).com.apollographql.cache.normalized/garbageCollect(com.apollographql.cache.normalized.api/MaxAgeProvider, kotlin.time/Duration = ...): com.apollographql.cache.normalized/GarbageCollectResult // com.apollographql.cache.normalized/garbageCollect|garbageCollect@com.apollographql.cache.normalized.api.NormalizedCache(com.apollographql.cache.normalized.api.MaxAgeProvider;kotlin.time.Duration){}[0] final fun (com.apollographql.cache.normalized.api/NormalizedCache).com.apollographql.cache.normalized/removeDanglingReferences(): com.apollographql.cache.normalized/RemovedFieldsAndRecords // com.apollographql.cache.normalized/removeDanglingReferences|removeDanglingReferences@com.apollographql.cache.normalized.api.NormalizedCache(){}[0] final fun (com.apollographql.cache.normalized.api/NormalizedCache).com.apollographql.cache.normalized/removeStaleFields(com.apollographql.cache.normalized.api/MaxAgeProvider, kotlin.time/Duration = ...): com.apollographql.cache.normalized/RemovedFieldsAndRecords // com.apollographql.cache.normalized/removeStaleFields|removeStaleFields@com.apollographql.cache.normalized.api.NormalizedCache(com.apollographql.cache.normalized.api.MaxAgeProvider;kotlin.time.Duration){}[0] @@ -572,10 +571,10 @@ final fun (com.apollographql.cache.normalized/ApolloStore).com.apollographql.cac final fun (com.apollographql.cache.normalized/ApolloStore).com.apollographql.cache.normalized/removeStaleFields(com.apollographql.cache.normalized.api/MaxAgeProvider, kotlin.time/Duration = ...): com.apollographql.cache.normalized/RemovedFieldsAndRecords // com.apollographql.cache.normalized/removeStaleFields|removeStaleFields@com.apollographql.cache.normalized.ApolloStore(com.apollographql.cache.normalized.api.MaxAgeProvider;kotlin.time.Duration){}[0] final fun 
(com.apollographql.cache.normalized/ApolloStore).com.apollographql.cache.normalized/removeUnreachableRecords(): kotlin.collections/Set // com.apollographql.cache.normalized/removeUnreachableRecords|removeUnreachableRecords@com.apollographql.cache.normalized.ApolloStore(){}[0] final fun (kotlin.collections/Collection?).com.apollographql.cache.normalized.api/dependentKeys(): kotlin.collections/Set // com.apollographql.cache.normalized.api/dependentKeys|dependentKeys@kotlin.collections.Collection?(){}[0] -final fun (kotlin.collections/Map).com.apollographql.cache.normalized/getReachableCacheKeys(): kotlin.collections/Set // com.apollographql.cache.normalized/getReachableCacheKeys|getReachableCacheKeys@kotlin.collections.Map(){}[0] +final fun (kotlin.collections/Map).com.apollographql.cache.normalized/getReachableCacheKeys(): kotlin.collections/Set // com.apollographql.cache.normalized/getReachableCacheKeys|getReachableCacheKeys@kotlin.collections.Map(){}[0] final fun <#A: com.apollographql.apollo.api/Executable.Data> (#A).com.apollographql.cache.normalized.api/withErrors(com.apollographql.apollo.api/Executable<#A>, kotlin.collections/List?, com.apollographql.apollo.api/CustomScalarAdapters = ...): kotlin.collections/Map // com.apollographql.cache.normalized.api/withErrors|withErrors@0:0(com.apollographql.apollo.api.Executable<0:0>;kotlin.collections.List?;com.apollographql.apollo.api.CustomScalarAdapters){0§}[0] -final fun <#A: com.apollographql.apollo.api/Executable.Data> (#A).com.apollographql.cache.normalized.internal/normalized(com.apollographql.apollo.api/Executable<#A>, kotlin/String = ..., com.apollographql.apollo.api/CustomScalarAdapters = ..., com.apollographql.cache.normalized.api/CacheKeyGenerator = ..., com.apollographql.cache.normalized.api/MetadataGenerator = ..., com.apollographql.cache.normalized.api/FieldKeyGenerator = ..., com.apollographql.cache.normalized.api/EmbeddedFieldsProvider = ...): kotlin.collections/Map // 
com.apollographql.cache.normalized.internal/normalized|normalized@0:0(com.apollographql.apollo.api.Executable<0:0>;kotlin.String;com.apollographql.apollo.api.CustomScalarAdapters;com.apollographql.cache.normalized.api.CacheKeyGenerator;com.apollographql.cache.normalized.api.MetadataGenerator;com.apollographql.cache.normalized.api.FieldKeyGenerator;com.apollographql.cache.normalized.api.EmbeddedFieldsProvider){0§}[0] -final fun <#A: com.apollographql.apollo.api/Executable.Data> (kotlin.collections/Map).com.apollographql.cache.normalized.internal/normalized(com.apollographql.apollo.api/Executable<#A>, kotlin/String = ..., com.apollographql.apollo.api/CustomScalarAdapters = ..., com.apollographql.cache.normalized.api/CacheKeyGenerator = ..., com.apollographql.cache.normalized.api/MetadataGenerator = ..., com.apollographql.cache.normalized.api/FieldKeyGenerator = ..., com.apollographql.cache.normalized.api/EmbeddedFieldsProvider = ...): kotlin.collections/Map // com.apollographql.cache.normalized.internal/normalized|normalized@kotlin.collections.Map(com.apollographql.apollo.api.Executable<0:0>;kotlin.String;com.apollographql.apollo.api.CustomScalarAdapters;com.apollographql.cache.normalized.api.CacheKeyGenerator;com.apollographql.cache.normalized.api.MetadataGenerator;com.apollographql.cache.normalized.api.FieldKeyGenerator;com.apollographql.cache.normalized.api.EmbeddedFieldsProvider){0§}[0] +final fun <#A: com.apollographql.apollo.api/Executable.Data> (#A).com.apollographql.cache.normalized.internal/normalized(com.apollographql.apollo.api/Executable<#A>, com.apollographql.cache.normalized.api/CacheKey = ..., com.apollographql.apollo.api/CustomScalarAdapters = ..., com.apollographql.cache.normalized.api/CacheKeyGenerator = ..., com.apollographql.cache.normalized.api/MetadataGenerator = ..., com.apollographql.cache.normalized.api/FieldKeyGenerator = ..., com.apollographql.cache.normalized.api/EmbeddedFieldsProvider = ...): kotlin.collections/Map // 
com.apollographql.cache.normalized.internal/normalized|normalized@0:0(com.apollographql.apollo.api.Executable<0:0>;com.apollographql.cache.normalized.api.CacheKey;com.apollographql.apollo.api.CustomScalarAdapters;com.apollographql.cache.normalized.api.CacheKeyGenerator;com.apollographql.cache.normalized.api.MetadataGenerator;com.apollographql.cache.normalized.api.FieldKeyGenerator;com.apollographql.cache.normalized.api.EmbeddedFieldsProvider){0§}[0] +final fun <#A: com.apollographql.apollo.api/Executable.Data> (kotlin.collections/Map).com.apollographql.cache.normalized.internal/normalized(com.apollographql.apollo.api/Executable<#A>, com.apollographql.cache.normalized.api/CacheKey = ..., com.apollographql.apollo.api/CustomScalarAdapters = ..., com.apollographql.cache.normalized.api/CacheKeyGenerator = ..., com.apollographql.cache.normalized.api/MetadataGenerator = ..., com.apollographql.cache.normalized.api/FieldKeyGenerator = ..., com.apollographql.cache.normalized.api/EmbeddedFieldsProvider = ...): kotlin.collections/Map // com.apollographql.cache.normalized.internal/normalized|normalized@kotlin.collections.Map(com.apollographql.apollo.api.Executable<0:0>;com.apollographql.cache.normalized.api.CacheKey;com.apollographql.apollo.api.CustomScalarAdapters;com.apollographql.cache.normalized.api.CacheKeyGenerator;com.apollographql.cache.normalized.api.MetadataGenerator;com.apollographql.cache.normalized.api.FieldKeyGenerator;com.apollographql.cache.normalized.api.EmbeddedFieldsProvider){0§}[0] final fun <#A: com.apollographql.apollo.api/Mutation.Data> (com.apollographql.apollo.api/ApolloRequest.Builder<#A>).com.apollographql.cache.normalized/optimisticUpdates(#A): com.apollographql.apollo.api/ApolloRequest.Builder<#A> // com.apollographql.cache.normalized/optimisticUpdates|optimisticUpdates@com.apollographql.apollo.api.ApolloRequest.Builder<0:0>(0:0){0§}[0] final fun <#A: com.apollographql.apollo.api/Mutation.Data> 
(com.apollographql.apollo/ApolloCall<#A>).com.apollographql.cache.normalized/optimisticUpdates(#A): com.apollographql.apollo/ApolloCall<#A> // com.apollographql.cache.normalized/optimisticUpdates|optimisticUpdates@com.apollographql.apollo.ApolloCall<0:0>(0:0){0§}[0] final fun <#A: com.apollographql.apollo.api/Operation.Data> (com.apollographql.apollo.api/ApolloRequest.Builder<#A>).com.apollographql.cache.normalized/fetchFromCache(kotlin/Boolean): com.apollographql.apollo.api/ApolloRequest.Builder<#A> // com.apollographql.cache.normalized/fetchFromCache|fetchFromCache@com.apollographql.apollo.api.ApolloRequest.Builder<0:0>(kotlin.Boolean){0§}[0] diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/ApolloStore.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/ApolloStore.kt index 8086c22d..666f7ed2 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/ApolloStore.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/ApolloStore.kt @@ -26,6 +26,7 @@ import com.apollographql.cache.normalized.api.NormalizedCache import com.apollographql.cache.normalized.api.NormalizedCacheFactory import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.api.RecordMerger +import com.apollographql.cache.normalized.api.RecordValue import com.apollographql.cache.normalized.api.TypePolicyCacheKeyGenerator import com.apollographql.cache.normalized.internal.DefaultApolloStore import com.benasher44.uuid.Uuid @@ -238,15 +239,27 @@ interface ApolloStore { */ fun remove(cacheKeys: List, cascade: Boolean = true): Int + /** + * Trims the store if its size exceeds [maxSizeBytes]. The amount of data to remove is determined by [trimFactor]. + * The oldest records are removed according to their update date. 
+ * + * This may not be supported by all cache implementations (currently this is implemented by the SQL cache). + * + * @param maxSizeBytes the size of the cache in bytes above which the cache should be trimmed. + * @param trimFactor the factor of the cache size to trim. + * @return the cache size in bytes after trimming or -1 if the operation is not supported. + */ + fun trim(maxSizeBytes: Long, trimFactor: Float = 0.1f): Long + /** * Normalizes executable data to a map of [Record] keyed by [Record.key]. */ fun normalize( executable: Executable, dataWithErrors: DataWithErrors, - rootKey: String = CacheKey.rootKey().key, + rootKey: CacheKey = CacheKey.rootKey(), customScalarAdapters: CustomScalarAdapters = CustomScalarAdapters.Empty, - ): Map + ): Map /** * Publishes a set of keys that have changed. This will notify subscribers of [changedKeys]. @@ -273,7 +286,7 @@ interface ApolloStore { * * This is a synchronous operation that might block if the underlying cache is doing IO. */ - fun dump(): Map, Map> + fun dump(): Map, Map> /** * Releases resources associated with this store. @@ -312,16 +325,18 @@ internal interface ApolloStoreInterceptor : ApolloInterceptor internal fun ApolloStore.cacheDumpProvider(): () -> Map>>> { return { dump().map { (cacheClass, cacheRecords) -> - cacheClass.normalizedCacheName() to cacheRecords.mapValues { (_, record) -> - record.size to record.fields.mapValues { (_, value) -> - value.toExternal() - } - } + cacheClass.normalizedCacheName() to cacheRecords + .mapKeys { (key, _) -> key.keyToString() } + .mapValues { (_, record) -> + record.size to record.fields.mapValues { (_, value) -> + value.toExternal() + } + } }.toMap() } } -private fun Any?.toExternal(): Any? { +private fun RecordValue.toExternal(): Any? { return when (this) { null -> null is String -> this @@ -330,7 +345,8 @@ private fun Any?.toExternal(): Any? 
{ is Long -> this is Double -> this is JsonNumber -> this - is CacheKey -> this.serialize() + is CacheKey -> "ApolloCacheReference{${this.keyToString()}}" + is Error -> "ApolloCacheError{${this.message}}" is List<*> -> { map { it.toExternal() } } diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/GarbageCollection.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/GarbageCollection.kt index 4cf0acc3..fb9f5b69 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/GarbageCollection.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/GarbageCollection.kt @@ -11,16 +11,17 @@ import com.apollographql.cache.normalized.api.NormalizedCache import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.api.RecordValue import com.apollographql.cache.normalized.api.expirationDate +import com.apollographql.cache.normalized.api.fieldKey import com.apollographql.cache.normalized.api.receivedDate import kotlin.time.Duration @ApolloInternal -fun Map.getReachableCacheKeys(): Set { - fun Map.getReachableCacheKeys(roots: List, reachableCacheKeys: MutableSet) { - val records = roots.mapNotNull { this[it.key] } +fun Map.getReachableCacheKeys(): Set { + fun Map.getReachableCacheKeys(roots: List, reachableCacheKeys: MutableSet) { + val records = roots.mapNotNull { this[it] } val cacheKeysToCheck = mutableListOf() for (record in records) { - reachableCacheKeys.add(CacheKey(record.key)) + reachableCacheKeys.add(record.key) cacheKeysToCheck.addAll(record.referencedFields() - reachableCacheKeys) } if (cacheKeysToCheck.isNotEmpty()) { @@ -34,7 +35,7 @@ fun Map.getReachableCacheKeys(): Set { } @ApolloInternal -fun NormalizedCache.allRecords(): Map { +fun NormalizedCache.allRecords(): Map { return dump().values.fold(emptyMap()) { acc, map -> acc + map } } @@ -49,8 +50,8 @@ fun 
NormalizedCache.removeUnreachableRecords(): Set { return removeUnreachableRecords(allRecords) } -private fun NormalizedCache.removeUnreachableRecords(allRecords: Map): Set { - val unreachableCacheKeys = allRecords.keys.map { CacheKey(it) } - allRecords.getReachableCacheKeys() +private fun NormalizedCache.removeUnreachableRecords(allRecords: Map): Set { + val unreachableCacheKeys = allRecords.keys - allRecords.getReachableCacheKeys() remove(unreachableCacheKeys, cascade = false) return unreachableCacheKeys.toSet() } @@ -89,11 +90,11 @@ fun NormalizedCache.removeStaleFields( } private fun NormalizedCache.removeStaleFields( - allRecords: MutableMap, + allRecords: MutableMap, maxAgeProvider: MaxAgeProvider, maxStale: Duration, ): RemovedFieldsAndRecords { - val recordsToUpdate = mutableMapOf() + val recordsToUpdate = mutableMapOf() val removedFields = mutableSetOf() for (record in allRecords.values.toList()) { var recordCopy = record @@ -115,7 +116,7 @@ private fun NormalizedCache.removeStaleFields( if (staleDuration >= maxStale.inWholeSeconds) { recordCopy -= field.key recordsToUpdate[record.key] = recordCopy - removedFields.add(record.key + "." + field.key) + removedFields.add(record.key.fieldKey(field.key)) if (recordCopy.isEmptyRecord()) { allRecords.remove(record.key) } else { @@ -133,7 +134,7 @@ private fun NormalizedCache.removeStaleFields( if (staleDuration >= maxStale.inWholeSeconds) { recordCopy -= field.key recordsToUpdate[record.key] = recordCopy - removedFields.add(record.key + "." 
+ field.key) + removedFields.add(record.key.fieldKey(field.key)) if (recordCopy.isEmptyRecord()) { allRecords.remove(record.key) } else { @@ -144,7 +145,7 @@ private fun NormalizedCache.removeStaleFields( } } if (recordsToUpdate.isNotEmpty()) { - remove(recordsToUpdate.keys.map { CacheKey(it) }, cascade = false) + remove(recordsToUpdate.keys, cascade = false) val emptyRecords = recordsToUpdate.values.filter { it.isEmptyRecord() }.toSet() val nonEmptyRecords = recordsToUpdate.values - emptyRecords if (nonEmptyRecords.isNotEmpty()) { @@ -152,7 +153,7 @@ private fun NormalizedCache.removeStaleFields( } return RemovedFieldsAndRecords( removedFields = removedFields, - removedRecords = emptyRecords.map { CacheKey(it.key) }.toSet() + removedRecords = emptyRecords.map { it.key }.toSet() ) } return RemovedFieldsAndRecords(removedFields = emptySet(), removedRecords = emptySet()) @@ -182,12 +183,12 @@ fun ApolloStore.removeStaleFields( * @return the fields and records that were removed. */ fun NormalizedCache.removeDanglingReferences(): RemovedFieldsAndRecords { - val allRecords: MutableMap = allRecords().toMutableMap() + val allRecords: MutableMap = allRecords().toMutableMap() return removeDanglingReferences(allRecords) } -private fun NormalizedCache.removeDanglingReferences(allRecords: MutableMap): RemovedFieldsAndRecords { - val recordsToUpdate = mutableMapOf() +private fun NormalizedCache.removeDanglingReferences(allRecords: MutableMap): RemovedFieldsAndRecords { + val recordsToUpdate = mutableMapOf() val allRemovedFields = mutableSetOf() do { val removedFields = mutableSetOf() @@ -197,7 +198,7 @@ private fun NormalizedCache.removeDanglingReferences(allRecords: MutableMap): Boolean { +private fun RecordValue.isDanglingReference(allRecords: Map): Boolean { return when (this) { - is CacheKey -> allRecords[this.key] == null + is CacheKey -> allRecords[this] == null is List<*> -> any { it.isDanglingReference(allRecords) } is Map<*, *> -> values.any { 
it.isDanglingReference(allRecords) } else -> false @@ -244,7 +245,7 @@ private fun RecordValue.isDanglingReference(allRecords: Map): Bo private fun Record.isEmptyRecord() = fields.isEmpty() || fields.size == 1 && fields.keys.first() == "__typename" -private fun RecordValue.guessType(allRecords: Map): String { +private fun RecordValue.guessType(allRecords: Map): String { return when (this) { is List<*> -> { val first = firstOrNull() ?: return "" @@ -252,7 +253,7 @@ private fun RecordValue.guessType(allRecords: Map): String { } is CacheKey -> { - allRecords[key]?.get("__typename") as? String ?: "" + allRecords[this]?.get("__typename") as? String ?: "" } else -> { diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/ApolloCacheHeaders.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/ApolloCacheHeaders.kt index f089fe5d..65c72f16 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/ApolloCacheHeaders.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/ApolloCacheHeaders.kt @@ -14,9 +14,7 @@ object ApolloCacheHeaders { */ const val MEMORY_CACHE_ONLY = "memory-cache-only" - /** - * Records from this request should be evicted after being read. - */ + @Deprecated(level = DeprecationLevel.ERROR, message = "This header has no effect and will be removed in a future release. 
Use ApolloStore.remove() instead.") const val EVICT_AFTER_READ = "evict-after-read" /** diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt index 2719a2e3..37aeede6 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheKey.kt @@ -1,14 +1,18 @@ package com.apollographql.cache.normalized.api +import kotlin.jvm.JvmInline import kotlin.jvm.JvmStatic /** * A [CacheKey] identifies an object in the cache. - * - * @param key The key of the object in the cache. The key must be globally unique. */ -class CacheKey(val key: String) { - +@JvmInline +value class CacheKey( + /** + * The key of the object in the cache. + */ + val key: String, +) { /** * Builds a [CacheKey] from a typename and a list of Strings. * @@ -31,38 +35,13 @@ class CacheKey(val key: String) { */ constructor(typename: String, vararg values: String) : this(typename, values.toList()) - override fun hashCode() = key.hashCode() - override fun equals(other: Any?): Boolean { - return key == (other as? CacheKey)?.key + internal fun keyToString(): String { + return key } - override fun toString() = "CacheKey($key)" - - fun serialize(): String { - return "$SERIALIZATION_TEMPLATE{$key}" - } + override fun toString() = "CacheKey(${keyToString()})" companion object { - // IntelliJ complains about the invalid escape but looks like JS still needs it. 
- // See https://youtrack.jetbrains.com/issue/KT-47189 - @Suppress("RegExpRedundantEscape") - private val SERIALIZATION_REGEX_PATTERN = Regex("ApolloCacheReference\\{(.*)\\}") - private const val SERIALIZATION_TEMPLATE = "ApolloCacheReference" - - @JvmStatic - fun deserialize(serializedCacheKey: String): CacheKey { - val values = SERIALIZATION_REGEX_PATTERN.matchEntire(serializedCacheKey)?.groupValues - require(values != null && values.size > 1) { - "Not a cache reference: $serializedCacheKey Must be of the form: $SERIALIZATION_TEMPLATE{%s}" - } - return CacheKey(values[1]) - } - - @JvmStatic - fun canDeserialize(value: String): Boolean { - return SERIALIZATION_REGEX_PATTERN.matches(value) - } - private val ROOT_CACHE_KEY = CacheKey("QUERY_ROOT") @JvmStatic @@ -71,3 +50,19 @@ class CacheKey(val key: String) { } } } + +fun CacheKey.isRootKey(): Boolean { + return this == CacheKey.rootKey() +} + +internal fun CacheKey.fieldKey(fieldName: String): String { + return "${keyToString()}.$fieldName" +} + +internal fun CacheKey.append(vararg keys: String): CacheKey { + var cacheKey: CacheKey = this + for (key in keys) { + cacheKey = CacheKey("${cacheKey.key}.$key") + } + return cacheKey +} diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheResolver.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheResolver.kt index 153f8b09..ccef9cc0 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheResolver.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/CacheResolver.kt @@ -100,7 +100,7 @@ class ResolverContext( /** * The key of the parent. Mainly used for debugging */ - val parentKey: String, + val parentKey: CacheKey, /** * The type of the parent @@ -135,7 +135,7 @@ object DefaultCacheResolver : CacheResolver { override fun resolveField(context: ResolverContext): Any? 
{ val fieldKey = context.getFieldKey() if (!context.parent.containsKey(fieldKey)) { - throw CacheMissException(context.parentKey, fieldKey) + throw CacheMissException(context.parentKey.keyToString(), fieldKey) } return context.parent[fieldKey] @@ -190,7 +190,7 @@ class CacheControlCacheResolver( val maxStale = context.cacheHeaders.headerValue(ApolloCacheHeaders.MAX_STALE)?.toLongOrNull() ?: 0L if (staleDuration >= maxStale) { throw CacheMissException( - key = context.parentKey, + key = context.parentKey.keyToString(), fieldName = context.getFieldKey(), stale = true ) @@ -206,7 +206,7 @@ class CacheControlCacheResolver( val maxStale = context.cacheHeaders.headerValue(ApolloCacheHeaders.MAX_STALE)?.toLongOrNull() ?: 0L if (staleDuration >= maxStale) { throw CacheMissException( - key = context.parentKey, + key = context.parentKey.keyToString(), fieldName = context.getFieldKey(), stale = true ) diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/NormalizedCache.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/NormalizedCache.kt index f2f79d28..abc96e6f 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/NormalizedCache.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/NormalizedCache.kt @@ -71,23 +71,22 @@ interface NormalizedCache : ReadOnlyNormalizedCache { fun remove(cacheKeys: Collection, cascade: Boolean): Int /** - * Remove records whose key matches a given pattern from this cache and all chained caches + * Trims the cache if its size exceeds [maxSizeBytes]. The amount of data to remove is determined by [trimFactor]. + * The oldest records are removed according to their update date. * - * @param pattern a pattern to filter the cache keys. 'pattern' is interpreted as in the LIKE operator of Sqlite. 
- * - '%' matches any sequence of zero or more characters - * - '_' matches any single character - * - The matching is case-insensitive - * - '\' is used as escape - * See https://sqlite.org/lang_expr.html for more details + * Optional operation. * - * @return the number of records deleted accross all caches + * @param maxSizeBytes the size of the cache in bytes above which the cache should be trimmed. + * @param trimFactor the factor of the cache size to trim. + * @return the cache size in bytes after trimming or -1 if the operation is not supported. */ - fun remove(pattern: String): Int - + fun trim(maxSizeBytes: Long, trimFactor: Float = 0.1f): Long { + return -1 + } companion object { @JvmStatic - fun prettifyDump(dump: Map<@JvmSuppressWildcards KClass<*>, Map>): String = dump.prettifyDump() + fun prettifyDump(dump: Map<@JvmSuppressWildcards KClass<*>, Map>): String = dump.prettifyDump() private fun Any?.prettifyDump(level: Int = 0): String { return buildString { @@ -128,6 +127,7 @@ interface NormalizedCache : ReadOnlyNormalizedCache { indent(level + 1) append(when (key) { is KClass<*> -> key.simpleName + is CacheKey -> key.keyToString() else -> key } ) diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/ReadOnlyNormalizedCache.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/ReadOnlyNormalizedCache.kt index 4291864d..5cbf8f00 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/ReadOnlyNormalizedCache.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/ReadOnlyNormalizedCache.kt @@ -9,7 +9,7 @@ interface ReadOnlyNormalizedCache { * @param cacheHeaders The cache headers associated with the request which generated this record. * @return The [Record] for key. If not present return null. */ - fun loadRecord(key: String, cacheHeaders: CacheHeaders): Record? 
+ fun loadRecord(key: CacheKey, cacheHeaders: CacheHeaders): Record? /** * Calls through to [NormalizedCache.loadRecord]. Implementations should override this @@ -19,7 +19,7 @@ interface ReadOnlyNormalizedCache { * @param keys The set of [Record] keys to read. * @param cacheHeaders The cache headers associated with the request which generated this record. */ - fun loadRecords(keys: Collection, cacheHeaders: CacheHeaders): Collection + fun loadRecords(keys: Collection, cacheHeaders: CacheHeaders): Collection - fun dump(): Map<@JvmSuppressWildcards KClass<*>, Map> + fun dump(): Map<@JvmSuppressWildcards KClass<*>, Map> } diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/Record.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/Record.kt index 09627dae..a6ee7c94 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/Record.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/Record.kt @@ -10,7 +10,7 @@ import com.benasher44.uuid.Uuid * a field is a GraphQL Object a [CacheKey] will be stored instead. */ class Record( - val key: String, + val key: CacheKey, val fields: Map, val mutationId: Uuid? = null, @@ -39,7 +39,7 @@ class Record( * A field key incorporates any GraphQL arguments in addition to the field name. 
*/ fun fieldKeys(): Set { - return fields.keys.map { "$key.$it" }.toSet() + return fields.keys.map { key.fieldKey(it) }.toSet() } /** @@ -71,7 +71,7 @@ class Record( record1.fields[it] != record2.fields[it] } - return changed.map { "${record1.key}.$it" }.toSet() + return changed.map { record1.key.fieldKey(it) }.toSet() } } } diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/RecordMerger.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/RecordMerger.kt index c4bd633b..6808b32c 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/RecordMerger.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/api/RecordMerger.kt @@ -41,7 +41,7 @@ object DefaultRecordMerger : RecordMerger { val existingFieldValue = existing.fields[fieldKey] if (!hasExistingFieldValue || existingFieldValue != incomingFieldValue) { mergedFields[fieldKey] = incomingFieldValue - changedKeys.add("${existing.key}.$fieldKey") + changedKeys.add(existing.key.fieldKey(fieldKey)) } } @@ -107,7 +107,7 @@ class FieldRecordMerger(private val fieldMerger: FieldMerger) : RecordMerger { if (!hasExistingFieldValue) { mergedFields[fieldKey] = incomingFieldValue mergedMetadata[fieldKey] = incoming.metadata[fieldKey].orEmpty() - changedKeys.add("${existing.key}.$fieldKey") + changedKeys.add(existing.key.fieldKey(fieldKey)) } else if (existingFieldValue != incomingFieldValue) { val existingFieldInfo = FieldInfo( value = existingFieldValue, @@ -122,7 +122,7 @@ class FieldRecordMerger(private val fieldMerger: FieldMerger) : RecordMerger { mergedFields[fieldKey] = mergeResult.value mergedMetadata[fieldKey] = mergeResult.metadata - changedKeys.add("${existing.key}.$fieldKey") + changedKeys.add(existing.key.fieldKey(fieldKey)) } } diff --git 
a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/CacheBatchReader.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/CacheBatchReader.kt index df20c4bb..b5a3f2b7 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/CacheBatchReader.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/CacheBatchReader.kt @@ -24,7 +24,7 @@ import kotlin.jvm.JvmSuppressWildcards */ internal class CacheBatchReader( private val cache: ReadOnlyNormalizedCache, - private val rootKey: String, + private val rootKey: CacheKey, private val variables: Executable.Variables, private val cacheResolver: CacheResolver, private val cacheHeaders: CacheHeaders, @@ -38,7 +38,7 @@ internal class CacheBatchReader( * @param path: the path where this pending reference needs to be inserted */ class PendingReference( - val key: String, + val key: CacheKey, val path: List, val fieldPath: List, val selections: List, @@ -57,6 +57,11 @@ internal class CacheBatchReader( */ private var isStale = false + /** + * True if at least one of the resolved fields is an Error, or if a cache miss happened + */ + private var hasErrors = false + private val pendingReferences = mutableListOf() private class CollectState(val variables: Executable.Variables) { @@ -104,23 +109,29 @@ internal class CacheBatchReader( ) while (pendingReferences.isNotEmpty()) { - val records = cache.loadRecords(pendingReferences.map { it.key }, cacheHeaders).associateBy { it.key } - + val records: Map = cache.loadRecords(pendingReferences.map { it.key }, cacheHeaders) + .also { + if (!hasErrors) { + hasErrors = it.any { it.values.any { it.hasErrors() } } + } + } + .associateBy { it.key } val copy = pendingReferences.toList() pendingReferences.clear() copy.forEach { pendingReference -> var record = records[pendingReference.key] if (record == null) { - if 
(pendingReference.key == CacheKey.rootKey().key) { + if (pendingReference.key == CacheKey.rootKey()) { // This happens the very first time we read the cache record = Record(pendingReference.key, emptyMap()) } else { if (returnPartialResponses) { data[pendingReference.path] = - cacheMissError(CacheMissException(key = pendingReference.key, fieldName = null, stale = false), path = pendingReference.path) + cacheMissError(CacheMissException(key = pendingReference.key.keyToString(), fieldName = null, stale = false), path = pendingReference.path) + hasErrors = true return@forEach } else { - throw CacheMissException(pendingReference.key) + throw CacheMissException(pendingReference.key.keyToString()) } } } @@ -149,6 +160,7 @@ internal class CacheBatchReader( } catch (e: CacheMissException) { if (e.stale) isStale = true if (returnPartialResponses) { + hasErrors = true cacheMissError(e, pendingReference.path + it.responseName) } else { throw e @@ -163,7 +175,11 @@ internal class CacheBatchReader( } } - return CacheBatchReaderData(data, CacheHeaders.Builder().apply { if (isStale) addHeader(ApolloCacheHeaders.STALE, "true") }.build()) + return CacheBatchReaderData( + data = data, + cacheHeaders = CacheHeaders.Builder().apply { if (isStale) addHeader(ApolloCacheHeaders.STALE, "true") }.build(), + hasErrors = hasErrors, + ) } private fun Any?.unwrap(): Any? 
{ @@ -194,7 +210,7 @@ internal class CacheBatchReader( is CacheKey -> { pendingReferences.add( PendingReference( - key = key, + key = this, selections = selections, parentType = parentType, path = path, @@ -224,7 +240,7 @@ internal class CacheBatchReader( field = it, variables = variables, parent = this, - parentKey = "", + parentKey = CacheKey(""), parentType = parentType, cacheHeaders = cacheHeaders, fieldKeyGenerator = fieldKeyGenerator, @@ -234,6 +250,7 @@ internal class CacheBatchReader( } catch (e: CacheMissException) { if (e.stale) isStale = true if (returnPartialResponses) { + hasErrors = true cacheMissError(e, path + it.responseName) } else { throw e @@ -248,6 +265,7 @@ internal class CacheBatchReader( internal class CacheBatchReaderData( private val data: Map, Any>, val cacheHeaders: CacheHeaders, + val hasErrors: Boolean, ) { @Suppress("UNCHECKED_CAST") internal fun toMap(withErrors: Boolean = true): DataWithErrors { @@ -304,4 +322,20 @@ internal class CacheBatchReader( .cacheMissException(exception) .build() } + + @Suppress("UNCHECKED_CAST") + internal fun Any?.hasErrors(): Boolean { + val queue = ArrayDeque() + queue.add(this) + while (queue.isNotEmpty()) { + val current = queue.removeFirst() + when (current) { + is Error -> return true + is List<*> -> queue.addAll(current) + // Embedded fields can be represented as Maps + is Map<*, *> -> queue.addAll(current.values) + } + } + return false + } } diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/DefaultApolloStore.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/DefaultApolloStore.kt index 8d8d83e2..8c73a790 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/DefaultApolloStore.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/DefaultApolloStore.kt @@ -104,9 +104,9 @@ internal class 
DefaultApolloStore( override fun normalize( executable: Executable, dataWithErrors: DataWithErrors, - rootKey: String, + rootKey: CacheKey, customScalarAdapters: CustomScalarAdapters, - ): Map { + ): Map { return dataWithErrors.normalized( executable = executable, rootKey = rootKey, @@ -129,7 +129,7 @@ internal class DefaultApolloStore( cacheHeaders = cacheHeaders, cacheResolver = cacheResolver, variables = variables, - rootKey = CacheKey.rootKey().key, + rootKey = CacheKey.rootKey(), rootSelections = operation.rootField().selections, rootField = operation.rootField(), fieldKeyGenerator = fieldKeyGenerator, @@ -140,7 +140,11 @@ internal class DefaultApolloStore( @Suppress("UNCHECKED_CAST") val dataWithNulls: Map? = - propagateErrors(dataWithErrors, operation.rootField(), errors) as Map? + if (batchReaderData.hasErrors) { + propagateErrors(dataWithErrors, operation.rootField(), errors) + } else { + dataWithErrors + } as Map? val falseVariablesCustomScalarAdapter = customScalarAdapters.newBuilder() .falseVariables(variables.valueMap.filter { it.value == false }.keys) @@ -181,7 +185,7 @@ internal class DefaultApolloStore( cacheHeaders = cacheHeaders, cacheResolver = cacheResolver, variables = variables, - rootKey = cacheKey.key, + rootKey = cacheKey, rootSelections = fragment.rootField().selections, rootField = fragment.rootField(), fieldKeyGenerator = fieldKeyGenerator, @@ -234,7 +238,7 @@ internal class DefaultApolloStore( val records = normalize( executable = fragment, dataWithErrors = dataWithErrors, - rootKey = cacheKey.key, + rootKey = cacheKey, customScalarAdapters = customScalarAdapters, ).values return cache.merge(records, cacheHeaders, recordMerger) @@ -276,7 +280,7 @@ internal class DefaultApolloStore( val records = normalize( executable = fragment, dataWithErrors = dataWithErrors, - rootKey = cacheKey.key, + rootKey = cacheKey, customScalarAdapters = customScalarAdapters, ).values.map { record -> Record( @@ -298,7 +302,11 @@ internal class 
DefaultApolloStore( return cache.merge(record, cacheHeaders, recordMerger) } - override fun dump(): Map, Map> { + override fun trim(maxSizeBytes: Long, trimFactor: Float): Long { + return cache.trim(maxSizeBytes, trimFactor) + } + + override fun dump(): Map, Map> { return cache.dump() } diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/Normalizer.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/Normalizer.kt index f2097106..355f436e 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/Normalizer.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/Normalizer.kt @@ -27,6 +27,8 @@ import com.apollographql.cache.normalized.api.MetadataGenerator import com.apollographql.cache.normalized.api.MetadataGeneratorContext import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.api.TypePolicyCacheKeyGenerator +import com.apollographql.cache.normalized.api.append +import com.apollographql.cache.normalized.api.isRootKey import com.apollographql.cache.normalized.api.withErrors /** @@ -35,19 +37,19 @@ import com.apollographql.cache.normalized.api.withErrors */ internal class Normalizer( private val variables: Executable.Variables, - private val rootKey: String, + private val rootKey: CacheKey, private val cacheKeyGenerator: CacheKeyGenerator, private val metadataGenerator: MetadataGenerator, private val fieldKeyGenerator: FieldKeyGenerator, private val embeddedFieldsProvider: EmbeddedFieldsProvider, ) { - private val records = mutableMapOf() + private val records = mutableMapOf() fun normalize( map: DataWithErrors, selections: List, parentType: CompiledNamedType, - ): Map { + ): Map { buildRecord(map, rootKey, selections, parentType) return records @@ -68,7 +70,7 @@ internal class Normalizer( */ private fun buildFields( obj: 
DataWithErrors, - key: String, + key: CacheKey, selections: List, parentType: CompiledNamedType, ): Map { @@ -99,7 +101,7 @@ internal class Normalizer( val fieldKey = fieldKeyGenerator.getFieldKey(FieldKeyContext(parentType.name, mergedField, variables)) - val base = if (key == CacheKey.rootKey().key) { + val base = if (key.isRootKey()) { // If we're at the root level, skip `QUERY_ROOT` altogether to save a few bytes null } else { @@ -109,7 +111,7 @@ internal class Normalizer( value = entry.value, field = mergedField, type_ = mergedField.type, - path = base.append(fieldKey), + path = base?.append(fieldKey) ?: CacheKey(fieldKey), embeddedFields = embeddedFieldsProvider.getEmbeddedFields(EmbeddedFieldsContext(parentType)), ) val metadata = if (entry.value is Error) { @@ -124,30 +126,28 @@ internal class Normalizer( } /** - * - * * @param obj the json node representing the object - * @param key the key for this record + * @param cacheKey the key for this record * @param selections the selections queried on this object * @return the CacheKey if this object has a CacheKey or the new Map if the object was embedded */ private fun buildRecord( obj: DataWithErrors, - key: String, + cacheKey: CacheKey, selections: List, parentType: CompiledNamedType, ): CacheKey { - val fields = buildFields(obj, key, selections, parentType) + val fields = buildFields(obj, cacheKey, selections, parentType) val fieldValues = fields.mapValues { it.value.fieldValue } val metadata = fields.mapValues { it.value.metadata }.filterValues { it.isNotEmpty() } val record = Record( - key = key, + key = cacheKey, fields = fieldValues, mutationId = null, metadata = metadata, ) - val existingRecord = records[key] + val existingRecord = records[cacheKey] val mergedRecord = if (existingRecord != null) { /** @@ -157,12 +157,11 @@ internal class Normalizer( } else { record } - records[key] = mergedRecord + records[cacheKey] = mergedRecord - return CacheKey(key) + return cacheKey } - /** * Replace all objects in 
[value] with [CacheKey] and if [value] is an object itself, returns it as a [CacheKey] * @@ -177,7 +176,7 @@ internal class Normalizer( value: Any?, field: CompiledField, type_: CompiledType, - path: String, + path: CacheKey, embeddedFields: List, ): Any? { /** @@ -210,7 +209,7 @@ internal class Normalizer( var key = cacheKeyGenerator.cacheKeyForObject( value as Map, CacheKeyGeneratorContext(field, variables), - )?.key + ) if (key == null) { key = path @@ -260,9 +259,6 @@ internal class Normalizer( collectFields(selections, parentType, typename, state) return state.fields } - - // The receiver can be null for the root query to save some space in the cache by not storing QUERY_ROOT all over the place - private fun String?.append(next: String): String = if (this == null) next else "$this.$next" } /** @@ -270,13 +266,13 @@ internal class Normalizer( */ fun D.normalized( executable: Executable, - rootKey: String = CacheKey.rootKey().key, + rootKey: CacheKey = CacheKey.rootKey(), customScalarAdapters: CustomScalarAdapters = CustomScalarAdapters.Empty, cacheKeyGenerator: CacheKeyGenerator = TypePolicyCacheKeyGenerator, metadataGenerator: MetadataGenerator = EmptyMetadataGenerator, fieldKeyGenerator: FieldKeyGenerator = DefaultFieldKeyGenerator, embeddedFieldsProvider: EmbeddedFieldsProvider = DefaultEmbeddedFieldsProvider, -): Map { +): Map { val dataWithErrors = this.withErrors(executable, null, customScalarAdapters) return dataWithErrors.normalized(executable, rootKey, customScalarAdapters, cacheKeyGenerator, metadataGenerator, fieldKeyGenerator, embeddedFieldsProvider) } @@ -286,13 +282,13 @@ fun D.normalized( */ fun DataWithErrors.normalized( executable: Executable, - rootKey: String = CacheKey.rootKey().key, + rootKey: CacheKey = CacheKey.rootKey(), customScalarAdapters: CustomScalarAdapters = CustomScalarAdapters.Empty, cacheKeyGenerator: CacheKeyGenerator = TypePolicyCacheKeyGenerator, metadataGenerator: MetadataGenerator = EmptyMetadataGenerator, 
fieldKeyGenerator: FieldKeyGenerator = DefaultFieldKeyGenerator, embeddedFieldsProvider: EmbeddedFieldsProvider = DefaultEmbeddedFieldsProvider, -): Map { +): Map { val variables = executable.variables(customScalarAdapters, withDefaultValues = true) return Normalizer(variables, rootKey, cacheKeyGenerator, metadataGenerator, fieldKeyGenerator, embeddedFieldsProvider) .normalize(this, executable.rootField().selections, executable.rootField().type.rawType()) diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/OptimisticNormalizedCache.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/OptimisticNormalizedCache.kt index f7cd26f4..880e2dd8 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/OptimisticNormalizedCache.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/OptimisticNormalizedCache.kt @@ -11,14 +11,14 @@ import kotlin.math.max import kotlin.reflect.KClass internal class OptimisticNormalizedCache(private val wrapped: NormalizedCache) : NormalizedCache { - private val recordJournals = ConcurrentMap() + private val recordJournals = ConcurrentMap() - override fun loadRecord(key: String, cacheHeaders: CacheHeaders): Record? { + override fun loadRecord(key: CacheKey, cacheHeaders: CacheHeaders): Record? 
{ val nonOptimisticRecord = wrapped.loadRecord(key, cacheHeaders) return nonOptimisticRecord.mergeJournalRecord(key) } - override fun loadRecords(keys: Collection, cacheHeaders: CacheHeaders): Collection { + override fun loadRecords(keys: Collection, cacheHeaders: CacheHeaders): Collection { val nonOptimisticRecords = wrapped.loadRecords(keys, cacheHeaders).associateBy { it.key } return keys.mapNotNull { key -> nonOptimisticRecords[key].mergeJournalRecord(key) @@ -46,16 +46,20 @@ internal class OptimisticNormalizedCache(private val wrapped: NormalizedCache) : return wrapped.remove(cacheKeys, cascade) + internalRemove(cacheKeys, cascade) } + override fun trim(maxSizeBytes: Long, trimFactor: Float): Long { + return wrapped.trim(maxSizeBytes, trimFactor) + } + private fun internalRemove(cacheKeys: Collection, cascade: Boolean): Int { var total = 0 val referencedCacheKeys = mutableSetOf() for (cacheKey in cacheKeys) { - val removedRecordJournal = recordJournals.remove(cacheKey.key) + val removedRecordJournal = recordJournals.remove(cacheKey) if (removedRecordJournal != null) { total++ if (cascade) { for (cacheReference in removedRecordJournal.current.referencedFields()) { - referencedCacheKeys += CacheKey(cacheReference.key) + referencedCacheKeys += cacheReference } } } @@ -66,21 +70,6 @@ internal class OptimisticNormalizedCache(private val wrapped: NormalizedCache) : return total } - override fun remove(pattern: String): Int { - var removed = wrapped.remove(pattern) - - val regex = patternToRegex(pattern) - val keys = HashSet(recordJournals.keys) // local copy to avoid concurrent modification - keys.forEach { key -> - if (regex.matches(key)) { - recordJournals.remove(key) - removed++ - } - } - - return removed - } - fun addOptimisticUpdates(recordSet: Collection): Set { return recordSet.flatMap { addOptimisticUpdate(it) @@ -111,11 +100,11 @@ internal class OptimisticNormalizedCache(private val wrapped: NormalizedCache) : return changedCacheKeys } - override fun 
dump(): Map, Map> { + override fun dump(): Map, Map> { return mapOf(this::class to recordJournals.mapValues { (_, journal) -> journal.current }) + wrapped.dump() } - private fun Record?.mergeJournalRecord(key: String): Record? { + private fun Record?.mergeJournalRecord(key: CacheKey): Record? { val journal = recordJournals[key] return if (journal != null) { this?.mergeWith(journal.current)?.first ?: journal.current diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/RecordWeigher.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/RecordWeigher.kt index e5b9b6e6..8129d27a 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/RecordWeigher.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/internal/RecordWeigher.kt @@ -5,7 +5,6 @@ import com.apollographql.apollo.api.json.JsonNumber import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.api.RecordValue -import okio.internal.commonAsUtf8ToByteArray import kotlin.jvm.JvmStatic internal object RecordWeigher { @@ -27,7 +26,7 @@ internal object RecordWeigher { @JvmStatic fun calculateBytes(record: Record): Int { - var size = SIZE_OF_RECORD_OVERHEAD + record.key.length + var size = SIZE_OF_RECORD_OVERHEAD + record.key.key.length for ((key, value) in record.fields) { size += key.length + weighField(value) } @@ -36,14 +35,18 @@ internal object RecordWeigher { } private fun weighField(field: RecordValue): Int { + /* + * Note: for Strings we use the character length where we should use the UTF-8 size, + * but this is a good enough estimation for the weight, and avoids a bit of overhead. 
+ */ return when (field) { null -> SIZE_OF_NULL - is String -> field.commonAsUtf8ToByteArray().size + is String -> field.length is Boolean -> SIZE_OF_BOOLEAN is Int -> SIZE_OF_INT is Long -> SIZE_OF_LONG // Might happen with LongDataAdapter is Double -> SIZE_OF_DOUBLE - is JsonNumber -> field.value.commonAsUtf8ToByteArray().size + SIZE_OF_LONG + is JsonNumber -> field.value.length + SIZE_OF_LONG /** * Custom scalars with a json object representation are stored directly in the record */ @@ -56,12 +59,12 @@ internal object RecordWeigher { } is CacheKey -> { - SIZE_OF_CACHE_KEY_OVERHEAD + field.key.commonAsUtf8ToByteArray().size + SIZE_OF_CACHE_KEY_OVERHEAD + field.key.length } is Error -> { SIZE_OF_ERROR_OVERHEAD + - field.message.commonAsUtf8ToByteArray().size + + field.message.length + (field.locations?.size ?: 0) * SIZE_OF_INT * 2 + weighField(field.path) + weighField(field.extensions) + diff --git a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/memory/MemoryCache.kt b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/memory/MemoryCache.kt index bf5fd586..41fdaed4 100644 --- a/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/memory/MemoryCache.kt +++ b/normalized-cache-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/memory/MemoryCache.kt @@ -10,7 +10,6 @@ import com.apollographql.cache.normalized.api.RecordMerger import com.apollographql.cache.normalized.api.RecordMergerContext import com.apollographql.cache.normalized.api.withDates import com.apollographql.cache.normalized.internal.Lock -import com.apollographql.cache.normalized.internal.patternToRegex import com.apollographql.cache.normalized.memory.internal.LruCache import kotlin.jvm.JvmOverloads import kotlin.reflect.KClass @@ -40,22 +39,22 @@ class MemoryCache( return lock?.read { block() } ?: block() } - private val lruCache = LruCache(maxSize = maxSizeBytes, expireAfterMillis = 
expireAfterMillis) { key, record -> - key.length + record.sizeInBytes + private val lruCache = LruCache(maxSize = maxSizeBytes, expireAfterMillis = expireAfterMillis) { key, record -> + key.key.length + record.sizeInBytes } val size: Int get() = lockRead { lruCache.weight() } - override fun loadRecord(key: String, cacheHeaders: CacheHeaders): Record? = lockRead { - val record = internalLoadRecord(key, cacheHeaders) + override fun loadRecord(key: CacheKey, cacheHeaders: CacheHeaders): Record? = lockRead { + val record = lruCache[key] record ?: nextCache?.loadRecord(key, cacheHeaders)?.also { nextCachedRecord -> lruCache[key] = nextCachedRecord } } - override fun loadRecords(keys: Collection, cacheHeaders: CacheHeaders): Collection = lockRead { - val recordsByKey: Map = keys.associateWith { key -> internalLoadRecord(key, cacheHeaders) } + override fun loadRecords(keys: Collection, cacheHeaders: CacheHeaders): Collection = lockRead { + val recordsByKey: Map = keys.associateWith { key -> lruCache[key] } val missingKeys = recordsByKey.filterValues { it == null }.keys val nextCachedRecords = nextCache?.loadRecords(missingKeys, cacheHeaders).orEmpty() for (record in nextCachedRecords) { @@ -64,14 +63,6 @@ class MemoryCache( recordsByKey.values.filterNotNull() + nextCachedRecords } - private fun internalLoadRecord(key: String, cacheHeaders: CacheHeaders): Record? 
{ - return lruCache[key]?.also { - if (cacheHeaders.hasHeader(ApolloCacheHeaders.EVICT_AFTER_READ)) { - lruCache.remove(key) - } - } - } - override fun clearAll() { lockWrite { lruCache.clear() @@ -95,9 +86,9 @@ class MemoryCache( var total = 0 val referencedCacheKeys = mutableSetOf() for (cacheKey in cacheKeys) { - val removedRecord = lruCache.remove(cacheKey.key) + val removedRecord = lruCache.remove(cacheKey) if (cascade && removedRecord != null) { - referencedCacheKeys += removedRecord.referencedFields().map { CacheKey(it.key) } + referencedCacheKeys += removedRecord.referencedFields() } if (removedRecord != null) { total++ @@ -109,23 +100,6 @@ class MemoryCache( return total } - override fun remove(pattern: String): Int { - val regex = patternToRegex(pattern) - return lockWrite { - var total = 0 - val keys = HashSet(lruCache.asMap().keys) // local copy to avoid concurrent modification - keys.forEach { - if (regex.matches(it)) { - lruCache.remove(it) - total++ - } - } - - val chainRemoved = nextCache?.remove(pattern) ?: 0 - total + chainRemoved - } - } - override fun merge(record: Record, cacheHeaders: CacheHeaders, recordMerger: RecordMerger): Set { if (cacheHeaders.hasHeader(ApolloCacheHeaders.DO_NOT_STORE)) { return emptySet() @@ -149,19 +123,19 @@ class MemoryCache( private fun internalMerge(record: Record, cacheHeaders: CacheHeaders, recordMerger: RecordMerger): Set { val receivedDate = cacheHeaders.headerValue(ApolloCacheHeaders.RECEIVED_DATE) val expirationDate = cacheHeaders.headerValue(ApolloCacheHeaders.EXPIRATION_DATE) - val oldRecord = loadRecord(record.key, cacheHeaders) - val changedKeys = if (oldRecord == null) { + val existingRecord = loadRecord(record.key, cacheHeaders) + val changedKeys = if (existingRecord == null) { lruCache[record.key] = record.withDates(receivedDate = receivedDate, expirationDate = expirationDate) record.fieldKeys() } else { - val (mergedRecord, changedKeys) = recordMerger.merge(RecordMergerContext(existing = oldRecord, 
incoming = record, cacheHeaders = cacheHeaders)) + val (mergedRecord, changedKeys) = recordMerger.merge(RecordMergerContext(existing = existingRecord, incoming = record, cacheHeaders = cacheHeaders)) lruCache[record.key] = mergedRecord.withDates(receivedDate = receivedDate, expirationDate = expirationDate) changedKeys } return changedKeys } - override fun dump(): Map, Map> { + override fun dump(): Map, Map> { return lockRead { mapOf(this::class to lruCache.asMap().mapValues { (_, record) -> record }) + nextCache?.dump().orEmpty() @@ -171,6 +145,14 @@ class MemoryCache( internal fun clearCurrentCache() { lruCache.clear() } + + override fun trim(maxSizeBytes: Long, trimFactor: Float): Long { + return if (nextCache == null) { + -1 + } else { + lockWrite { nextCache.trim(maxSizeBytes, trimFactor) } + } + } } class MemoryCacheFactory @JvmOverloads constructor( diff --git a/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/CacheKeyResolverTest.kt b/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/CacheKeyResolverTest.kt index e82edcd7..9bf2d916 100644 --- a/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/CacheKeyResolverTest.kt +++ b/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/CacheKeyResolverTest.kt @@ -41,7 +41,7 @@ class CacheKeyResolverTest { field, Executable.Variables(emptyMap()), emptyMap(), - "", + CacheKey(""), "", CacheHeaders(emptyMap()), DefaultFieldKeyGenerator, diff --git a/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/DefaultRecordMergerTest.kt b/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/DefaultRecordMergerTest.kt index d1ca6c3d..f303cd1a 100644 --- a/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/DefaultRecordMergerTest.kt +++ 
b/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/DefaultRecordMergerTest.kt @@ -1,6 +1,7 @@ package com.apollographql.cache.normalized import com.apollographql.cache.normalized.api.CacheHeaders +import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.DefaultRecordMerger import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.api.RecordMergerContext @@ -11,7 +12,7 @@ class DefaultRecordMergerTest { @Test fun mergeMetaData() { val existing = Record( - key = "key", + key = CacheKey("key"), fields = mapOf( "field1" to "value1", "field2" to "value2" @@ -30,7 +31,7 @@ class DefaultRecordMergerTest { ) val incoming = Record( - key = "key", + key = CacheKey("key"), fields = mapOf( "field1" to "value1.incoming", "field3" to "value3", @@ -51,7 +52,7 @@ class DefaultRecordMergerTest { val mergedRecord = DefaultRecordMerger.merge(RecordMergerContext(existing, incoming, CacheHeaders.NONE)).first val expected = Record( - key = "key", + key = CacheKey("key"), fields = mapOf( "field1" to "value1.incoming", "field2" to "value2", diff --git a/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/MemoryCacheTest.kt b/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/MemoryCacheTest.kt index d606fd1f..b2fbb5b1 100644 --- a/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/MemoryCacheTest.kt +++ b/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/MemoryCacheTest.kt @@ -4,9 +4,9 @@ import com.apollographql.cache.normalized.api.ApolloCacheHeaders import com.apollographql.cache.normalized.api.CacheHeaders import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.DefaultRecordMerger -import com.apollographql.cache.normalized.memory.MemoryCache import 
com.apollographql.cache.normalized.api.NormalizedCache import com.apollographql.cache.normalized.api.Record +import com.apollographql.cache.normalized.memory.MemoryCache import kotlin.test.Test import kotlin.test.assertEquals import kotlin.test.assertNotNull @@ -46,14 +46,14 @@ class MemoryCacheTest { val records = listOf(testRecord1, testRecord2, testRecord3) lruCache.merge(records, CacheHeaders.NONE, DefaultRecordMerger) - val readRecords = lruCache.loadRecords(listOf("key1", "key2", "key3"), CacheHeaders.NONE) + val readRecords = lruCache.loadRecords(listOf(CacheKey("key1"), CacheKey("key2"), CacheKey("key3")), CacheHeaders.NONE) assertTrue(readRecords.containsAll(records)) } @Test fun testLoad_recordNotPresent() { val lruCache = createCache() - val record = lruCache.loadRecord("key1", CacheHeaders.NONE) + val record = lruCache.loadRecord(CacheKey("key1"), CacheHeaders.NONE) assertNull(record) } @@ -151,7 +151,7 @@ class MemoryCacheTest { fun testDualCache_recordNotPresent() { val secondaryCache = createCache() val primaryCache = createCache(nextCache = secondaryCache) - assertNull(primaryCache.loadRecord("key", CacheHeaders.NONE)) + assertNull(primaryCache.loadRecord(CacheKey("key"), CacheHeaders.NONE)) } @@ -185,21 +185,6 @@ class MemoryCacheTest { assertEquals(testRecord.fields, primaryCache.loadRecord(testRecord.key, CacheHeaders.NONE)?.fields) } - - // Tests for StandardCacheHeader compliance. 
- @Test - fun testHeader_evictAfterRead() { - val lruCache = createCache() - val testRecord = createTestRecord("1") - - lruCache.merge(testRecord, CacheHeaders.NONE, DefaultRecordMerger) - - val headers = CacheHeaders.builder().addHeader(ApolloCacheHeaders.EVICT_AFTER_READ, "true").build() - - assertNotNull(lruCache.loadRecord(testRecord.key, headers)) - assertNull(lruCache.loadRecord(testRecord.key, headers)) - } - @Test fun testHeader_noCache() { val lruCache = createCache() @@ -238,7 +223,7 @@ class MemoryCacheTest { val lruCache = createCache() val record1 = Record( - key = "id_1", + key = CacheKey("id_1"), fields = mapOf( "a" to "stringValueA", "b" to "stringValueB" @@ -246,7 +231,7 @@ class MemoryCacheTest { ) val record2 = Record( - key = "id_2", + key = CacheKey("id_2"), fields = mapOf( "a" to CacheKey("id_1"), ) @@ -255,7 +240,7 @@ class MemoryCacheTest { val records = listOf(record1, record2) lruCache.merge(records, CacheHeaders.NONE, DefaultRecordMerger) - assertTrue(lruCache.remove(CacheKey(record2.key), cascade = false)) + assertTrue(lruCache.remove(record2.key, cascade = false)) assertNotNull(lruCache.loadRecord(record1.key, CacheHeaders.NONE)) } @@ -264,7 +249,7 @@ class MemoryCacheTest { val lruCache = createCache() val record1 = Record( - key = "id_1", + key = CacheKey("id_1"), fields = mapOf( "a" to "stringValueA", "b" to "stringValueB" @@ -272,7 +257,7 @@ class MemoryCacheTest { ) val record2 = Record( - key = "id_2", + key = CacheKey("id_2"), fields = mapOf( "a" to CacheKey("id_1"), ) @@ -281,7 +266,7 @@ class MemoryCacheTest { val records = listOf(record1, record2) lruCache.merge(records, CacheHeaders.NONE, DefaultRecordMerger) - assertTrue(lruCache.remove(CacheKey(record2.key), cascade = true)) + assertTrue(lruCache.remove(record2.key, cascade = true)) assertNull(lruCache.loadRecord(record1.key, CacheHeaders.NONE)) } @@ -301,7 +286,7 @@ class MemoryCacheTest { private fun createTestRecord(id: String): Record { return Record( - key = "key$id", 
+ key = CacheKey("key$id"), fields = mapOf( "field1" to "stringValueA$id", "field2" to "stringValueB$id" diff --git a/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/RecordWeigherTest.kt b/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/RecordWeigherTest.kt index bbb949ec..51cac0df 100644 --- a/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/RecordWeigherTest.kt +++ b/normalized-cache-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/RecordWeigherTest.kt @@ -19,7 +19,7 @@ class RecordWeigherTest { val expectedCacheKeyList = listOf(CacheKey("bar"), CacheKey("baz")) val expectedScalarList = listOf("scalarOne", "scalarTwo") val record = Record( - key = "root", + key = CacheKey("root"), fields = mapOf( "double" to expectedDouble, "string" to expectedStringValue, diff --git a/normalized-cache-sqlite-incubating/api/android/normalized-cache-sqlite-incubating.api b/normalized-cache-sqlite-incubating/api/android/normalized-cache-sqlite-incubating.api index 131ffa74..66bc8614 100644 --- a/normalized-cache-sqlite-incubating/api/android/normalized-cache-sqlite-incubating.api +++ b/normalized-cache-sqlite-incubating/api/android/normalized-cache-sqlite-incubating.api @@ -12,13 +12,13 @@ public final class com/apollographql/cache/normalized/sql/ApolloInitializer$Comp public final class com/apollographql/cache/normalized/sql/SqlNormalizedCache : com/apollographql/cache/normalized/api/NormalizedCache { public fun clearAll ()V public fun dump ()Ljava/util/Map; - public fun loadRecord (Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; + public fun loadRecord-eNSUWrY (Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; public fun loadRecords 
(Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Collection; public fun merge (Lcom/apollographql/cache/normalized/api/Record;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; public fun merge (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; - public fun remove (Lcom/apollographql/cache/normalized/api/CacheKey;Z)Z - public fun remove (Ljava/lang/String;)I public fun remove (Ljava/util/Collection;Z)I + public fun remove-eNSUWrY (Ljava/lang/String;Z)Z + public fun trim (JF)J } public final class com/apollographql/cache/normalized/sql/SqlNormalizedCacheFactory_androidKt { @@ -38,113 +38,68 @@ public final class com/apollographql/cache/normalized/sql/VersionKt { public static final field VERSION Ljava/lang/String; } -public abstract interface class com/apollographql/cache/normalized/sql/internal/blob/BlobDatabase : app/cash/sqldelight/Transacter { - public static final field Companion Lcom/apollographql/cache/normalized/sql/internal/blob/BlobDatabase$Companion; - public abstract fun getBlobQueries ()Lcom/apollographql/cache/normalized/sql/internal/blob/BlobQueries; -} - -public final class com/apollographql/cache/normalized/sql/internal/blob/BlobDatabase$Companion { - public final fun getSchema ()Lapp/cash/sqldelight/db/SqlSchema; - public final fun invoke (Lapp/cash/sqldelight/db/SqlDriver;)Lcom/apollographql/cache/normalized/sql/internal/blob/BlobDatabase; -} - -public final class com/apollographql/cache/normalized/sql/internal/blob/BlobQueries : app/cash/sqldelight/TransacterImpl { - public fun (Lapp/cash/sqldelight/db/SqlDriver;)V - public final fun changes ()Lapp/cash/sqldelight/ExecutableQuery; - public final fun delete (Ljava/lang/String;)V - public final fun deleteAll ()V - public final fun deleteRecords (Ljava/util/Collection;)V - public final fun 
deleteRecordsWithKeyMatching (Ljava/lang/String;Ljava/lang/String;)V - public final fun insert (Ljava/lang/String;[B)V - public final fun recordForKey (Ljava/lang/String;)Lapp/cash/sqldelight/Query; - public final fun recordForKey (Ljava/lang/String;Lkotlin/jvm/functions/Function2;)Lapp/cash/sqldelight/Query; - public final fun recordsForKeys (Ljava/util/Collection;)Lapp/cash/sqldelight/Query; - public final fun recordsForKeys (Ljava/util/Collection;Lkotlin/jvm/functions/Function2;)Lapp/cash/sqldelight/Query; - public final fun selectRecords ()Lapp/cash/sqldelight/Query; - public final fun selectRecords (Lkotlin/jvm/functions/Function2;)Lapp/cash/sqldelight/Query; - public final fun update ([BLjava/lang/String;)V -} - -public final class com/apollographql/cache/normalized/sql/internal/blob/Blobs { - public fun (Ljava/lang/String;[B)V +public final class com/apollographql/cache/normalized/sql/internal/record/Record { + public fun (Ljava/lang/String;[BJ)V public final fun component1 ()Ljava/lang/String; public final fun component2 ()[B - public final fun copy (Ljava/lang/String;[B)Lcom/apollographql/cache/normalized/sql/internal/blob/Blobs; - public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/blob/Blobs;Ljava/lang/String;[BILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/blob/Blobs; + public final fun component3 ()J + public final fun copy (Ljava/lang/String;[BJ)Lcom/apollographql/cache/normalized/sql/internal/record/Record; + public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/record/Record;Ljava/lang/String;[BJILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/record/Record; public fun equals (Ljava/lang/Object;)Z - public final fun getBlob ()[B public final fun getKey ()Ljava/lang/String; + public final fun getRecord ()[B + public final fun getUpdate_date ()J public fun hashCode ()I public fun toString ()Ljava/lang/String; } -public abstract 
interface class com/apollographql/cache/normalized/sql/internal/blob2/Blob2Database : app/cash/sqldelight/Transacter { - public static final field Companion Lcom/apollographql/cache/normalized/sql/internal/blob2/Blob2Database$Companion; - public abstract fun getBlob2Queries ()Lcom/apollographql/cache/normalized/sql/internal/blob2/Blob2Queries; -} - -public final class com/apollographql/cache/normalized/sql/internal/blob2/Blob2Database$Companion { - public final fun getSchema ()Lapp/cash/sqldelight/db/SqlSchema; - public final fun invoke (Lapp/cash/sqldelight/db/SqlDriver;)Lcom/apollographql/cache/normalized/sql/internal/blob2/Blob2Database; -} - -public final class com/apollographql/cache/normalized/sql/internal/blob2/Blob2Queries : app/cash/sqldelight/TransacterImpl { +public final class com/apollographql/cache/normalized/sql/internal/record/RecordQueries : app/cash/sqldelight/TransacterImpl { public fun (Lapp/cash/sqldelight/db/SqlDriver;)V public final fun changes ()Lapp/cash/sqldelight/ExecutableQuery; public final fun count ()Lapp/cash/sqldelight/Query; - public final fun delete (Ljava/lang/String;)V - public final fun deleteAll ()V + public final fun deleteAllRecords ()V public final fun deleteRecords (Ljava/util/Collection;)V - public final fun deleteRecordsWithKeyMatching (Ljava/lang/String;Ljava/lang/String;)V - public final fun insert (Ljava/lang/String;[BLjava/lang/Long;)V - public final fun recordForKey (Ljava/lang/String;)Lapp/cash/sqldelight/Query; - public final fun recordForKey (Ljava/lang/String;Lkotlin/jvm/functions/Function2;)Lapp/cash/sqldelight/Query; - public final fun recordsForKeys (Ljava/util/Collection;)Lapp/cash/sqldelight/Query; - public final fun recordsForKeys (Ljava/util/Collection;Lkotlin/jvm/functions/Function2;)Lapp/cash/sqldelight/Query; - public final fun selectRecords ()Lapp/cash/sqldelight/Query; - public final fun selectRecords (Lkotlin/jvm/functions/Function3;)Lapp/cash/sqldelight/Query; - public final fun trim (J)V - public 
final fun update ([BLjava/lang/Long;Ljava/lang/String;)V + public final fun insertOrUpdateRecord (Ljava/lang/String;[BJ)V + public final fun selectAllRecords ()Lapp/cash/sqldelight/Query; + public final fun selectAllRecords (Lkotlin/jvm/functions/Function2;)Lapp/cash/sqldelight/Query; + public final fun selectRecords (Ljava/util/Collection;)Lapp/cash/sqldelight/Query; + public final fun selectRecords (Ljava/util/Collection;Lkotlin/jvm/functions/Function2;)Lapp/cash/sqldelight/Query; + public final fun trimByUpdateDate (J)V } -public final class com/apollographql/cache/normalized/sql/internal/blob2/Blobs { - public fun (Ljava/lang/String;[BLjava/lang/Long;)V +public final class com/apollographql/cache/normalized/sql/internal/record/SelectAllRecords { + public fun (Ljava/lang/String;[B)V public final fun component1 ()Ljava/lang/String; public final fun component2 ()[B - public final fun component3 ()Ljava/lang/Long; - public final fun copy (Ljava/lang/String;[BLjava/lang/Long;)Lcom/apollographql/cache/normalized/sql/internal/blob2/Blobs; - public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/blob2/Blobs;Ljava/lang/String;[BLjava/lang/Long;ILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/blob2/Blobs; + public final fun copy (Ljava/lang/String;[B)Lcom/apollographql/cache/normalized/sql/internal/record/SelectAllRecords; + public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/record/SelectAllRecords;Ljava/lang/String;[BILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/record/SelectAllRecords; public fun equals (Ljava/lang/Object;)Z - public final fun getBlob ()[B - public final fun getDate ()Ljava/lang/Long; public final fun getKey ()Ljava/lang/String; + public final fun getRecord ()[B public fun hashCode ()I public fun toString ()Ljava/lang/String; } -public final class com/apollographql/cache/normalized/sql/internal/blob2/RecordForKey { +public final class 
com/apollographql/cache/normalized/sql/internal/record/SelectRecords { public fun (Ljava/lang/String;[B)V public final fun component1 ()Ljava/lang/String; public final fun component2 ()[B - public final fun copy (Ljava/lang/String;[B)Lcom/apollographql/cache/normalized/sql/internal/blob2/RecordForKey; - public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/blob2/RecordForKey;Ljava/lang/String;[BILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/blob2/RecordForKey; + public final fun copy (Ljava/lang/String;[B)Lcom/apollographql/cache/normalized/sql/internal/record/SelectRecords; + public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/record/SelectRecords;Ljava/lang/String;[BILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/record/SelectRecords; public fun equals (Ljava/lang/Object;)Z - public final fun getBlob ()[B public final fun getKey ()Ljava/lang/String; + public final fun getRecord ()[B public fun hashCode ()I public fun toString ()Ljava/lang/String; } -public final class com/apollographql/cache/normalized/sql/internal/blob2/RecordsForKeys { - public fun (Ljava/lang/String;[B)V - public final fun component1 ()Ljava/lang/String; - public final fun component2 ()[B - public final fun copy (Ljava/lang/String;[B)Lcom/apollographql/cache/normalized/sql/internal/blob2/RecordsForKeys; - public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/blob2/RecordsForKeys;Ljava/lang/String;[BILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/blob2/RecordsForKeys; - public fun equals (Ljava/lang/Object;)Z - public final fun getBlob ()[B - public final fun getKey ()Ljava/lang/String; - public fun hashCode ()I - public fun toString ()Ljava/lang/String; +public abstract interface class com/apollographql/cache/normalized/sql/internal/record/SqlRecordDatabase : app/cash/sqldelight/Transacter { + public static final field 
Companion Lcom/apollographql/cache/normalized/sql/internal/record/SqlRecordDatabase$Companion; + public abstract fun getRecordQueries ()Lcom/apollographql/cache/normalized/sql/internal/record/RecordQueries; +} + +public final class com/apollographql/cache/normalized/sql/internal/record/SqlRecordDatabase$Companion { + public final fun getSchema ()Lapp/cash/sqldelight/db/SqlSchema; + public final fun invoke (Lapp/cash/sqldelight/db/SqlDriver;)Lcom/apollographql/cache/normalized/sql/internal/record/SqlRecordDatabase; } diff --git a/normalized-cache-sqlite-incubating/api/jvm/normalized-cache-sqlite-incubating.api b/normalized-cache-sqlite-incubating/api/jvm/normalized-cache-sqlite-incubating.api index 5ec64ca0..885fb475 100644 --- a/normalized-cache-sqlite-incubating/api/jvm/normalized-cache-sqlite-incubating.api +++ b/normalized-cache-sqlite-incubating/api/jvm/normalized-cache-sqlite-incubating.api @@ -1,13 +1,13 @@ public final class com/apollographql/cache/normalized/sql/SqlNormalizedCache : com/apollographql/cache/normalized/api/NormalizedCache { public fun clearAll ()V public fun dump ()Ljava/util/Map; - public fun loadRecord (Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; + public fun loadRecord-eNSUWrY (Ljava/lang/String;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Lcom/apollographql/cache/normalized/api/Record; public fun loadRecords (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;)Ljava/util/Collection; public fun merge (Lcom/apollographql/cache/normalized/api/Record;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; public fun merge (Ljava/util/Collection;Lcom/apollographql/cache/normalized/api/CacheHeaders;Lcom/apollographql/cache/normalized/api/RecordMerger;)Ljava/util/Set; - public fun remove (Lcom/apollographql/cache/normalized/api/CacheKey;Z)Z - public fun remove 
(Ljava/lang/String;)I public fun remove (Ljava/util/Collection;Z)I + public fun remove-eNSUWrY (Ljava/lang/String;Z)Z + public fun trim (JF)J } public final class com/apollographql/cache/normalized/sql/SqlNormalizedCacheFactory_jvmKt { @@ -19,121 +19,72 @@ public final class com/apollographql/cache/normalized/sql/SqlNormalizedCacheFact public static synthetic fun SqlNormalizedCacheFactory$default (Ljava/lang/String;Ljava/util/Properties;ILjava/lang/Object;)Lcom/apollographql/cache/normalized/api/NormalizedCacheFactory; } -public final class com/apollographql/cache/normalized/sql/TrimmableNormalizedCacheFactory : com/apollographql/cache/normalized/api/NormalizedCacheFactory { - public fun create ()Lcom/apollographql/cache/normalized/api/NormalizedCache; -} - public final class com/apollographql/cache/normalized/sql/VersionKt { public static final field VERSION Ljava/lang/String; } -public abstract interface class com/apollographql/cache/normalized/sql/internal/blob/BlobDatabase : app/cash/sqldelight/Transacter { - public static final field Companion Lcom/apollographql/cache/normalized/sql/internal/blob/BlobDatabase$Companion; - public abstract fun getBlobQueries ()Lcom/apollographql/cache/normalized/sql/internal/blob/BlobQueries; -} - -public final class com/apollographql/cache/normalized/sql/internal/blob/BlobDatabase$Companion { - public final fun getSchema ()Lapp/cash/sqldelight/db/SqlSchema; - public final fun invoke (Lapp/cash/sqldelight/db/SqlDriver;)Lcom/apollographql/cache/normalized/sql/internal/blob/BlobDatabase; -} - -public final class com/apollographql/cache/normalized/sql/internal/blob/BlobQueries : app/cash/sqldelight/TransacterImpl { - public fun (Lapp/cash/sqldelight/db/SqlDriver;)V - public final fun changes ()Lapp/cash/sqldelight/ExecutableQuery; - public final fun delete (Ljava/lang/String;)V - public final fun deleteAll ()V - public final fun deleteRecords (Ljava/util/Collection;)V - public final fun deleteRecordsWithKeyMatching 
(Ljava/lang/String;Ljava/lang/String;)V - public final fun insert (Ljava/lang/String;[B)V - public final fun recordForKey (Ljava/lang/String;)Lapp/cash/sqldelight/Query; - public final fun recordForKey (Ljava/lang/String;Lkotlin/jvm/functions/Function2;)Lapp/cash/sqldelight/Query; - public final fun recordsForKeys (Ljava/util/Collection;)Lapp/cash/sqldelight/Query; - public final fun recordsForKeys (Ljava/util/Collection;Lkotlin/jvm/functions/Function2;)Lapp/cash/sqldelight/Query; - public final fun selectRecords ()Lapp/cash/sqldelight/Query; - public final fun selectRecords (Lkotlin/jvm/functions/Function2;)Lapp/cash/sqldelight/Query; - public final fun update ([BLjava/lang/String;)V -} - -public final class com/apollographql/cache/normalized/sql/internal/blob/Blobs { - public fun (Ljava/lang/String;[B)V +public final class com/apollographql/cache/normalized/sql/internal/record/Record { + public fun (Ljava/lang/String;[BJ)V public final fun component1 ()Ljava/lang/String; public final fun component2 ()[B - public final fun copy (Ljava/lang/String;[B)Lcom/apollographql/cache/normalized/sql/internal/blob/Blobs; - public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/blob/Blobs;Ljava/lang/String;[BILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/blob/Blobs; + public final fun component3 ()J + public final fun copy (Ljava/lang/String;[BJ)Lcom/apollographql/cache/normalized/sql/internal/record/Record; + public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/record/Record;Ljava/lang/String;[BJILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/record/Record; public fun equals (Ljava/lang/Object;)Z - public final fun getBlob ()[B public final fun getKey ()Ljava/lang/String; + public final fun getRecord ()[B + public final fun getUpdate_date ()J public fun hashCode ()I public fun toString ()Ljava/lang/String; } -public abstract interface class 
com/apollographql/cache/normalized/sql/internal/blob2/Blob2Database : app/cash/sqldelight/Transacter { - public static final field Companion Lcom/apollographql/cache/normalized/sql/internal/blob2/Blob2Database$Companion; - public abstract fun getBlob2Queries ()Lcom/apollographql/cache/normalized/sql/internal/blob2/Blob2Queries; -} - -public final class com/apollographql/cache/normalized/sql/internal/blob2/Blob2Database$Companion { - public final fun getSchema ()Lapp/cash/sqldelight/db/SqlSchema; - public final fun invoke (Lapp/cash/sqldelight/db/SqlDriver;)Lcom/apollographql/cache/normalized/sql/internal/blob2/Blob2Database; -} - -public final class com/apollographql/cache/normalized/sql/internal/blob2/Blob2Queries : app/cash/sqldelight/TransacterImpl { +public final class com/apollographql/cache/normalized/sql/internal/record/RecordQueries : app/cash/sqldelight/TransacterImpl { public fun (Lapp/cash/sqldelight/db/SqlDriver;)V public final fun changes ()Lapp/cash/sqldelight/ExecutableQuery; public final fun count ()Lapp/cash/sqldelight/Query; - public final fun delete (Ljava/lang/String;)V - public final fun deleteAll ()V + public final fun deleteAllRecords ()V public final fun deleteRecords (Ljava/util/Collection;)V - public final fun deleteRecordsWithKeyMatching (Ljava/lang/String;Ljava/lang/String;)V - public final fun insert (Ljava/lang/String;[BLjava/lang/Long;)V - public final fun recordForKey (Ljava/lang/String;)Lapp/cash/sqldelight/Query; - public final fun recordForKey (Ljava/lang/String;Lkotlin/jvm/functions/Function2;)Lapp/cash/sqldelight/Query; - public final fun recordsForKeys (Ljava/util/Collection;)Lapp/cash/sqldelight/Query; - public final fun recordsForKeys (Ljava/util/Collection;Lkotlin/jvm/functions/Function2;)Lapp/cash/sqldelight/Query; - public final fun selectRecords ()Lapp/cash/sqldelight/Query; - public final fun selectRecords (Lkotlin/jvm/functions/Function3;)Lapp/cash/sqldelight/Query; - public final fun trim (J)V - public final fun update 
([BLjava/lang/Long;Ljava/lang/String;)V + public final fun insertOrUpdateRecord (Ljava/lang/String;[BJ)V + public final fun selectAllRecords ()Lapp/cash/sqldelight/Query; + public final fun selectAllRecords (Lkotlin/jvm/functions/Function2;)Lapp/cash/sqldelight/Query; + public final fun selectRecords (Ljava/util/Collection;)Lapp/cash/sqldelight/Query; + public final fun selectRecords (Ljava/util/Collection;Lkotlin/jvm/functions/Function2;)Lapp/cash/sqldelight/Query; + public final fun trimByUpdateDate (J)V } -public final class com/apollographql/cache/normalized/sql/internal/blob2/Blobs { - public fun (Ljava/lang/String;[BLjava/lang/Long;)V +public final class com/apollographql/cache/normalized/sql/internal/record/SelectAllRecords { + public fun (Ljava/lang/String;[B)V public final fun component1 ()Ljava/lang/String; public final fun component2 ()[B - public final fun component3 ()Ljava/lang/Long; - public final fun copy (Ljava/lang/String;[BLjava/lang/Long;)Lcom/apollographql/cache/normalized/sql/internal/blob2/Blobs; - public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/blob2/Blobs;Ljava/lang/String;[BLjava/lang/Long;ILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/blob2/Blobs; + public final fun copy (Ljava/lang/String;[B)Lcom/apollographql/cache/normalized/sql/internal/record/SelectAllRecords; + public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/record/SelectAllRecords;Ljava/lang/String;[BILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/record/SelectAllRecords; public fun equals (Ljava/lang/Object;)Z - public final fun getBlob ()[B - public final fun getDate ()Ljava/lang/Long; public final fun getKey ()Ljava/lang/String; + public final fun getRecord ()[B public fun hashCode ()I public fun toString ()Ljava/lang/String; } -public final class com/apollographql/cache/normalized/sql/internal/blob2/RecordForKey { +public final class 
com/apollographql/cache/normalized/sql/internal/record/SelectRecords { public fun (Ljava/lang/String;[B)V public final fun component1 ()Ljava/lang/String; public final fun component2 ()[B - public final fun copy (Ljava/lang/String;[B)Lcom/apollographql/cache/normalized/sql/internal/blob2/RecordForKey; - public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/blob2/RecordForKey;Ljava/lang/String;[BILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/blob2/RecordForKey; + public final fun copy (Ljava/lang/String;[B)Lcom/apollographql/cache/normalized/sql/internal/record/SelectRecords; + public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/record/SelectRecords;Ljava/lang/String;[BILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/record/SelectRecords; public fun equals (Ljava/lang/Object;)Z - public final fun getBlob ()[B public final fun getKey ()Ljava/lang/String; + public final fun getRecord ()[B public fun hashCode ()I public fun toString ()Ljava/lang/String; } -public final class com/apollographql/cache/normalized/sql/internal/blob2/RecordsForKeys { - public fun (Ljava/lang/String;[B)V - public final fun component1 ()Ljava/lang/String; - public final fun component2 ()[B - public final fun copy (Ljava/lang/String;[B)Lcom/apollographql/cache/normalized/sql/internal/blob2/RecordsForKeys; - public static synthetic fun copy$default (Lcom/apollographql/cache/normalized/sql/internal/blob2/RecordsForKeys;Ljava/lang/String;[BILjava/lang/Object;)Lcom/apollographql/cache/normalized/sql/internal/blob2/RecordsForKeys; - public fun equals (Ljava/lang/Object;)Z - public final fun getBlob ()[B - public final fun getKey ()Ljava/lang/String; - public fun hashCode ()I - public fun toString ()Ljava/lang/String; +public abstract interface class com/apollographql/cache/normalized/sql/internal/record/SqlRecordDatabase : app/cash/sqldelight/Transacter { + public static final field 
Companion Lcom/apollographql/cache/normalized/sql/internal/record/SqlRecordDatabase$Companion; + public abstract fun getRecordQueries ()Lcom/apollographql/cache/normalized/sql/internal/record/RecordQueries; +} + +public final class com/apollographql/cache/normalized/sql/internal/record/SqlRecordDatabase$Companion { + public final fun getSchema ()Lapp/cash/sqldelight/db/SqlSchema; + public final fun invoke (Lapp/cash/sqldelight/db/SqlDriver;)Lcom/apollographql/cache/normalized/sql/internal/record/SqlRecordDatabase; } diff --git a/normalized-cache-sqlite-incubating/api/normalized-cache-sqlite-incubating.klib.api b/normalized-cache-sqlite-incubating/api/normalized-cache-sqlite-incubating.klib.api index 25be1d66..1889d7e1 100644 --- a/normalized-cache-sqlite-incubating/api/normalized-cache-sqlite-incubating.klib.api +++ b/normalized-cache-sqlite-incubating/api/normalized-cache-sqlite-incubating.klib.api @@ -6,145 +6,94 @@ // - Show declarations: true // Library unique name: -abstract interface com.apollographql.cache.normalized.sql.internal.blob/BlobDatabase : app.cash.sqldelight/Transacter { // com.apollographql.cache.normalized.sql.internal.blob/BlobDatabase|null[0] - abstract val blobQueries // com.apollographql.cache.normalized.sql.internal.blob/BlobDatabase.blobQueries|{}blobQueries[0] - abstract fun (): com.apollographql.cache.normalized.sql.internal.blob/BlobQueries // com.apollographql.cache.normalized.sql.internal.blob/BlobDatabase.blobQueries.|(){}[0] +abstract interface com.apollographql.cache.normalized.sql.internal.record/SqlRecordDatabase : app.cash.sqldelight/Transacter { // com.apollographql.cache.normalized.sql.internal.record/SqlRecordDatabase|null[0] + abstract val recordQueries // com.apollographql.cache.normalized.sql.internal.record/SqlRecordDatabase.recordQueries|{}recordQueries[0] + abstract fun (): com.apollographql.cache.normalized.sql.internal.record/RecordQueries // 
com.apollographql.cache.normalized.sql.internal.record/SqlRecordDatabase.recordQueries.|(){}[0] - final object Companion { // com.apollographql.cache.normalized.sql.internal.blob/BlobDatabase.Companion|null[0] - final val Schema // com.apollographql.cache.normalized.sql.internal.blob/BlobDatabase.Companion.Schema|{}Schema[0] - final fun (): app.cash.sqldelight.db/SqlSchema> // com.apollographql.cache.normalized.sql.internal.blob/BlobDatabase.Companion.Schema.|(){}[0] + final object Companion { // com.apollographql.cache.normalized.sql.internal.record/SqlRecordDatabase.Companion|null[0] + final val Schema // com.apollographql.cache.normalized.sql.internal.record/SqlRecordDatabase.Companion.Schema|{}Schema[0] + final fun (): app.cash.sqldelight.db/SqlSchema> // com.apollographql.cache.normalized.sql.internal.record/SqlRecordDatabase.Companion.Schema.|(){}[0] - final fun invoke(app.cash.sqldelight.db/SqlDriver): com.apollographql.cache.normalized.sql.internal.blob/BlobDatabase // com.apollographql.cache.normalized.sql.internal.blob/BlobDatabase.Companion.invoke|invoke(app.cash.sqldelight.db.SqlDriver){}[0] + final fun invoke(app.cash.sqldelight.db/SqlDriver): com.apollographql.cache.normalized.sql.internal.record/SqlRecordDatabase // com.apollographql.cache.normalized.sql.internal.record/SqlRecordDatabase.Companion.invoke|invoke(app.cash.sqldelight.db.SqlDriver){}[0] } } -abstract interface com.apollographql.cache.normalized.sql.internal.blob2/Blob2Database : app.cash.sqldelight/Transacter { // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Database|null[0] - abstract val blob2Queries // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Database.blob2Queries|{}blob2Queries[0] - abstract fun (): com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Database.blob2Queries.|(){}[0] - - final object Companion { // 
com.apollographql.cache.normalized.sql.internal.blob2/Blob2Database.Companion|null[0] - final val Schema // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Database.Companion.Schema|{}Schema[0] - final fun (): app.cash.sqldelight.db/SqlSchema> // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Database.Companion.Schema.|(){}[0] - - final fun invoke(app.cash.sqldelight.db/SqlDriver): com.apollographql.cache.normalized.sql.internal.blob2/Blob2Database // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Database.Companion.invoke|invoke(app.cash.sqldelight.db.SqlDriver){}[0] - } -} - -final class com.apollographql.cache.normalized.sql.internal.blob/BlobQueries : app.cash.sqldelight/TransacterImpl { // com.apollographql.cache.normalized.sql.internal.blob/BlobQueries|null[0] - constructor (app.cash.sqldelight.db/SqlDriver) // com.apollographql.cache.normalized.sql.internal.blob/BlobQueries.|(app.cash.sqldelight.db.SqlDriver){}[0] - - final fun <#A1: kotlin/Any> recordForKey(kotlin/String, kotlin/Function2): app.cash.sqldelight/Query<#A1> // com.apollographql.cache.normalized.sql.internal.blob/BlobQueries.recordForKey|recordForKey(kotlin.String;kotlin.Function2){0§}[0] - final fun <#A1: kotlin/Any> recordsForKeys(kotlin.collections/Collection, kotlin/Function2): app.cash.sqldelight/Query<#A1> // com.apollographql.cache.normalized.sql.internal.blob/BlobQueries.recordsForKeys|recordsForKeys(kotlin.collections.Collection;kotlin.Function2){0§}[0] - final fun <#A1: kotlin/Any> selectRecords(kotlin/Function2): app.cash.sqldelight/Query<#A1> // com.apollographql.cache.normalized.sql.internal.blob/BlobQueries.selectRecords|selectRecords(kotlin.Function2){0§}[0] - final fun changes(): app.cash.sqldelight/ExecutableQuery // com.apollographql.cache.normalized.sql.internal.blob/BlobQueries.changes|changes(){}[0] - final fun delete(kotlin/String) // com.apollographql.cache.normalized.sql.internal.blob/BlobQueries.delete|delete(kotlin.String){}[0] - 
final fun deleteAll() // com.apollographql.cache.normalized.sql.internal.blob/BlobQueries.deleteAll|deleteAll(){}[0] - final fun deleteRecords(kotlin.collections/Collection) // com.apollographql.cache.normalized.sql.internal.blob/BlobQueries.deleteRecords|deleteRecords(kotlin.collections.Collection){}[0] - final fun deleteRecordsWithKeyMatching(kotlin/String, kotlin/String) // com.apollographql.cache.normalized.sql.internal.blob/BlobQueries.deleteRecordsWithKeyMatching|deleteRecordsWithKeyMatching(kotlin.String;kotlin.String){}[0] - final fun insert(kotlin/String, kotlin/ByteArray) // com.apollographql.cache.normalized.sql.internal.blob/BlobQueries.insert|insert(kotlin.String;kotlin.ByteArray){}[0] - final fun recordForKey(kotlin/String): app.cash.sqldelight/Query // com.apollographql.cache.normalized.sql.internal.blob/BlobQueries.recordForKey|recordForKey(kotlin.String){}[0] - final fun recordsForKeys(kotlin.collections/Collection): app.cash.sqldelight/Query // com.apollographql.cache.normalized.sql.internal.blob/BlobQueries.recordsForKeys|recordsForKeys(kotlin.collections.Collection){}[0] - final fun selectRecords(): app.cash.sqldelight/Query // com.apollographql.cache.normalized.sql.internal.blob/BlobQueries.selectRecords|selectRecords(){}[0] - final fun update(kotlin/ByteArray, kotlin/String) // com.apollographql.cache.normalized.sql.internal.blob/BlobQueries.update|update(kotlin.ByteArray;kotlin.String){}[0] -} - -final class com.apollographql.cache.normalized.sql.internal.blob/Blobs { // com.apollographql.cache.normalized.sql.internal.blob/Blobs|null[0] - constructor (kotlin/String, kotlin/ByteArray) // com.apollographql.cache.normalized.sql.internal.blob/Blobs.|(kotlin.String;kotlin.ByteArray){}[0] - - final val blob // com.apollographql.cache.normalized.sql.internal.blob/Blobs.blob|{}blob[0] - final fun (): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.blob/Blobs.blob.|(){}[0] - final val key // 
com.apollographql.cache.normalized.sql.internal.blob/Blobs.key|{}key[0] - final fun (): kotlin/String // com.apollographql.cache.normalized.sql.internal.blob/Blobs.key.|(){}[0] - - final fun component1(): kotlin/String // com.apollographql.cache.normalized.sql.internal.blob/Blobs.component1|component1(){}[0] - final fun component2(): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.blob/Blobs.component2|component2(){}[0] - final fun copy(kotlin/String = ..., kotlin/ByteArray = ...): com.apollographql.cache.normalized.sql.internal.blob/Blobs // com.apollographql.cache.normalized.sql.internal.blob/Blobs.copy|copy(kotlin.String;kotlin.ByteArray){}[0] - final fun equals(kotlin/Any?): kotlin/Boolean // com.apollographql.cache.normalized.sql.internal.blob/Blobs.equals|equals(kotlin.Any?){}[0] - final fun hashCode(): kotlin/Int // com.apollographql.cache.normalized.sql.internal.blob/Blobs.hashCode|hashCode(){}[0] - final fun toString(): kotlin/String // com.apollographql.cache.normalized.sql.internal.blob/Blobs.toString|toString(){}[0] -} - -final class com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries : app.cash.sqldelight/TransacterImpl { // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries|null[0] - constructor (app.cash.sqldelight.db/SqlDriver) // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.|(app.cash.sqldelight.db.SqlDriver){}[0] - - final fun <#A1: kotlin/Any> recordForKey(kotlin/String, kotlin/Function2): app.cash.sqldelight/Query<#A1> // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.recordForKey|recordForKey(kotlin.String;kotlin.Function2){0§}[0] - final fun <#A1: kotlin/Any> recordsForKeys(kotlin.collections/Collection, kotlin/Function2): app.cash.sqldelight/Query<#A1> // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.recordsForKeys|recordsForKeys(kotlin.collections.Collection;kotlin.Function2){0§}[0] - final fun <#A1: kotlin/Any> 
selectRecords(kotlin/Function3): app.cash.sqldelight/Query<#A1> // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.selectRecords|selectRecords(kotlin.Function3){0§}[0] - final fun changes(): app.cash.sqldelight/ExecutableQuery // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.changes|changes(){}[0] - final fun count(): app.cash.sqldelight/Query // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.count|count(){}[0] - final fun delete(kotlin/String) // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.delete|delete(kotlin.String){}[0] - final fun deleteAll() // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.deleteAll|deleteAll(){}[0] - final fun deleteRecords(kotlin.collections/Collection) // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.deleteRecords|deleteRecords(kotlin.collections.Collection){}[0] - final fun deleteRecordsWithKeyMatching(kotlin/String, kotlin/String) // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.deleteRecordsWithKeyMatching|deleteRecordsWithKeyMatching(kotlin.String;kotlin.String){}[0] - final fun insert(kotlin/String, kotlin/ByteArray, kotlin/Long?) 
// com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.insert|insert(kotlin.String;kotlin.ByteArray;kotlin.Long?){}[0] - final fun recordForKey(kotlin/String): app.cash.sqldelight/Query // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.recordForKey|recordForKey(kotlin.String){}[0] - final fun recordsForKeys(kotlin.collections/Collection): app.cash.sqldelight/Query // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.recordsForKeys|recordsForKeys(kotlin.collections.Collection){}[0] - final fun selectRecords(): app.cash.sqldelight/Query // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.selectRecords|selectRecords(){}[0] - final fun trim(kotlin/Long) // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.trim|trim(kotlin.Long){}[0] - final fun update(kotlin/ByteArray, kotlin/Long?, kotlin/String) // com.apollographql.cache.normalized.sql.internal.blob2/Blob2Queries.update|update(kotlin.ByteArray;kotlin.Long?;kotlin.String){}[0] +final class com.apollographql.cache.normalized.sql.internal.record/Record { // com.apollographql.cache.normalized.sql.internal.record/Record|null[0] + constructor (kotlin/String, kotlin/ByteArray, kotlin/Long) // com.apollographql.cache.normalized.sql.internal.record/Record.|(kotlin.String;kotlin.ByteArray;kotlin.Long){}[0] + + final val key // com.apollographql.cache.normalized.sql.internal.record/Record.key|{}key[0] + final fun (): kotlin/String // com.apollographql.cache.normalized.sql.internal.record/Record.key.|(){}[0] + final val record // com.apollographql.cache.normalized.sql.internal.record/Record.record|{}record[0] + final fun (): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.record/Record.record.|(){}[0] + final val update_date // com.apollographql.cache.normalized.sql.internal.record/Record.update_date|{}update_date[0] + final fun (): kotlin/Long // 
com.apollographql.cache.normalized.sql.internal.record/Record.update_date.|(){}[0] + + final fun component1(): kotlin/String // com.apollographql.cache.normalized.sql.internal.record/Record.component1|component1(){}[0] + final fun component2(): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.record/Record.component2|component2(){}[0] + final fun component3(): kotlin/Long // com.apollographql.cache.normalized.sql.internal.record/Record.component3|component3(){}[0] + final fun copy(kotlin/String = ..., kotlin/ByteArray = ..., kotlin/Long = ...): com.apollographql.cache.normalized.sql.internal.record/Record // com.apollographql.cache.normalized.sql.internal.record/Record.copy|copy(kotlin.String;kotlin.ByteArray;kotlin.Long){}[0] + final fun equals(kotlin/Any?): kotlin/Boolean // com.apollographql.cache.normalized.sql.internal.record/Record.equals|equals(kotlin.Any?){}[0] + final fun hashCode(): kotlin/Int // com.apollographql.cache.normalized.sql.internal.record/Record.hashCode|hashCode(){}[0] + final fun toString(): kotlin/String // com.apollographql.cache.normalized.sql.internal.record/Record.toString|toString(){}[0] } -final class com.apollographql.cache.normalized.sql.internal.blob2/Blobs { // com.apollographql.cache.normalized.sql.internal.blob2/Blobs|null[0] - constructor (kotlin/String, kotlin/ByteArray, kotlin/Long?) // com.apollographql.cache.normalized.sql.internal.blob2/Blobs.|(kotlin.String;kotlin.ByteArray;kotlin.Long?){}[0] - - final val blob // com.apollographql.cache.normalized.sql.internal.blob2/Blobs.blob|{}blob[0] - final fun (): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.blob2/Blobs.blob.|(){}[0] - final val date // com.apollographql.cache.normalized.sql.internal.blob2/Blobs.date|{}date[0] - final fun (): kotlin/Long? 
// com.apollographql.cache.normalized.sql.internal.blob2/Blobs.date.|(){}[0] - final val key // com.apollographql.cache.normalized.sql.internal.blob2/Blobs.key|{}key[0] - final fun (): kotlin/String // com.apollographql.cache.normalized.sql.internal.blob2/Blobs.key.|(){}[0] - - final fun component1(): kotlin/String // com.apollographql.cache.normalized.sql.internal.blob2/Blobs.component1|component1(){}[0] - final fun component2(): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.blob2/Blobs.component2|component2(){}[0] - final fun component3(): kotlin/Long? // com.apollographql.cache.normalized.sql.internal.blob2/Blobs.component3|component3(){}[0] - final fun copy(kotlin/String = ..., kotlin/ByteArray = ..., kotlin/Long? = ...): com.apollographql.cache.normalized.sql.internal.blob2/Blobs // com.apollographql.cache.normalized.sql.internal.blob2/Blobs.copy|copy(kotlin.String;kotlin.ByteArray;kotlin.Long?){}[0] - final fun equals(kotlin/Any?): kotlin/Boolean // com.apollographql.cache.normalized.sql.internal.blob2/Blobs.equals|equals(kotlin.Any?){}[0] - final fun hashCode(): kotlin/Int // com.apollographql.cache.normalized.sql.internal.blob2/Blobs.hashCode|hashCode(){}[0] - final fun toString(): kotlin/String // com.apollographql.cache.normalized.sql.internal.blob2/Blobs.toString|toString(){}[0] +final class com.apollographql.cache.normalized.sql.internal.record/RecordQueries : app.cash.sqldelight/TransacterImpl { // com.apollographql.cache.normalized.sql.internal.record/RecordQueries|null[0] + constructor (app.cash.sqldelight.db/SqlDriver) // com.apollographql.cache.normalized.sql.internal.record/RecordQueries.|(app.cash.sqldelight.db.SqlDriver){}[0] + + final fun <#A1: kotlin/Any> selectAllRecords(kotlin/Function2): app.cash.sqldelight/Query<#A1> // com.apollographql.cache.normalized.sql.internal.record/RecordQueries.selectAllRecords|selectAllRecords(kotlin.Function2){0§}[0] + final fun <#A1: kotlin/Any> 
selectRecords(kotlin.collections/Collection, kotlin/Function2): app.cash.sqldelight/Query<#A1> // com.apollographql.cache.normalized.sql.internal.record/RecordQueries.selectRecords|selectRecords(kotlin.collections.Collection;kotlin.Function2){0§}[0] + final fun changes(): app.cash.sqldelight/ExecutableQuery // com.apollographql.cache.normalized.sql.internal.record/RecordQueries.changes|changes(){}[0] + final fun count(): app.cash.sqldelight/Query // com.apollographql.cache.normalized.sql.internal.record/RecordQueries.count|count(){}[0] + final fun deleteAllRecords() // com.apollographql.cache.normalized.sql.internal.record/RecordQueries.deleteAllRecords|deleteAllRecords(){}[0] + final fun deleteRecords(kotlin.collections/Collection) // com.apollographql.cache.normalized.sql.internal.record/RecordQueries.deleteRecords|deleteRecords(kotlin.collections.Collection){}[0] + final fun insertOrUpdateRecord(kotlin/String, kotlin/ByteArray, kotlin/Long) // com.apollographql.cache.normalized.sql.internal.record/RecordQueries.insertOrUpdateRecord|insertOrUpdateRecord(kotlin.String;kotlin.ByteArray;kotlin.Long){}[0] + final fun selectAllRecords(): app.cash.sqldelight/Query // com.apollographql.cache.normalized.sql.internal.record/RecordQueries.selectAllRecords|selectAllRecords(){}[0] + final fun selectRecords(kotlin.collections/Collection): app.cash.sqldelight/Query // com.apollographql.cache.normalized.sql.internal.record/RecordQueries.selectRecords|selectRecords(kotlin.collections.Collection){}[0] + final fun trimByUpdateDate(kotlin/Long) // com.apollographql.cache.normalized.sql.internal.record/RecordQueries.trimByUpdateDate|trimByUpdateDate(kotlin.Long){}[0] } -final class com.apollographql.cache.normalized.sql.internal.blob2/RecordForKey { // com.apollographql.cache.normalized.sql.internal.blob2/RecordForKey|null[0] - constructor (kotlin/String, kotlin/ByteArray) // com.apollographql.cache.normalized.sql.internal.blob2/RecordForKey.|(kotlin.String;kotlin.ByteArray){}[0] 
+final class com.apollographql.cache.normalized.sql.internal.record/SelectAllRecords { // com.apollographql.cache.normalized.sql.internal.record/SelectAllRecords|null[0] + constructor (kotlin/String, kotlin/ByteArray) // com.apollographql.cache.normalized.sql.internal.record/SelectAllRecords.|(kotlin.String;kotlin.ByteArray){}[0] - final val blob // com.apollographql.cache.normalized.sql.internal.blob2/RecordForKey.blob|{}blob[0] - final fun (): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.blob2/RecordForKey.blob.|(){}[0] - final val key // com.apollographql.cache.normalized.sql.internal.blob2/RecordForKey.key|{}key[0] - final fun (): kotlin/String // com.apollographql.cache.normalized.sql.internal.blob2/RecordForKey.key.|(){}[0] + final val key // com.apollographql.cache.normalized.sql.internal.record/SelectAllRecords.key|{}key[0] + final fun (): kotlin/String // com.apollographql.cache.normalized.sql.internal.record/SelectAllRecords.key.|(){}[0] + final val record // com.apollographql.cache.normalized.sql.internal.record/SelectAllRecords.record|{}record[0] + final fun (): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.record/SelectAllRecords.record.|(){}[0] - final fun component1(): kotlin/String // com.apollographql.cache.normalized.sql.internal.blob2/RecordForKey.component1|component1(){}[0] - final fun component2(): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.blob2/RecordForKey.component2|component2(){}[0] - final fun copy(kotlin/String = ..., kotlin/ByteArray = ...): com.apollographql.cache.normalized.sql.internal.blob2/RecordForKey // com.apollographql.cache.normalized.sql.internal.blob2/RecordForKey.copy|copy(kotlin.String;kotlin.ByteArray){}[0] - final fun equals(kotlin/Any?): kotlin/Boolean // com.apollographql.cache.normalized.sql.internal.blob2/RecordForKey.equals|equals(kotlin.Any?){}[0] - final fun hashCode(): kotlin/Int // 
com.apollographql.cache.normalized.sql.internal.blob2/RecordForKey.hashCode|hashCode(){}[0] - final fun toString(): kotlin/String // com.apollographql.cache.normalized.sql.internal.blob2/RecordForKey.toString|toString(){}[0] + final fun component1(): kotlin/String // com.apollographql.cache.normalized.sql.internal.record/SelectAllRecords.component1|component1(){}[0] + final fun component2(): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.record/SelectAllRecords.component2|component2(){}[0] + final fun copy(kotlin/String = ..., kotlin/ByteArray = ...): com.apollographql.cache.normalized.sql.internal.record/SelectAllRecords // com.apollographql.cache.normalized.sql.internal.record/SelectAllRecords.copy|copy(kotlin.String;kotlin.ByteArray){}[0] + final fun equals(kotlin/Any?): kotlin/Boolean // com.apollographql.cache.normalized.sql.internal.record/SelectAllRecords.equals|equals(kotlin.Any?){}[0] + final fun hashCode(): kotlin/Int // com.apollographql.cache.normalized.sql.internal.record/SelectAllRecords.hashCode|hashCode(){}[0] + final fun toString(): kotlin/String // com.apollographql.cache.normalized.sql.internal.record/SelectAllRecords.toString|toString(){}[0] } -final class com.apollographql.cache.normalized.sql.internal.blob2/RecordsForKeys { // com.apollographql.cache.normalized.sql.internal.blob2/RecordsForKeys|null[0] - constructor (kotlin/String, kotlin/ByteArray) // com.apollographql.cache.normalized.sql.internal.blob2/RecordsForKeys.|(kotlin.String;kotlin.ByteArray){}[0] +final class com.apollographql.cache.normalized.sql.internal.record/SelectRecords { // com.apollographql.cache.normalized.sql.internal.record/SelectRecords|null[0] + constructor (kotlin/String, kotlin/ByteArray) // com.apollographql.cache.normalized.sql.internal.record/SelectRecords.|(kotlin.String;kotlin.ByteArray){}[0] - final val blob // com.apollographql.cache.normalized.sql.internal.blob2/RecordsForKeys.blob|{}blob[0] - final fun (): kotlin/ByteArray // 
com.apollographql.cache.normalized.sql.internal.blob2/RecordsForKeys.blob.|(){}[0] - final val key // com.apollographql.cache.normalized.sql.internal.blob2/RecordsForKeys.key|{}key[0] - final fun (): kotlin/String // com.apollographql.cache.normalized.sql.internal.blob2/RecordsForKeys.key.|(){}[0] + final val key // com.apollographql.cache.normalized.sql.internal.record/SelectRecords.key|{}key[0] + final fun (): kotlin/String // com.apollographql.cache.normalized.sql.internal.record/SelectRecords.key.|(){}[0] + final val record // com.apollographql.cache.normalized.sql.internal.record/SelectRecords.record|{}record[0] + final fun (): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.record/SelectRecords.record.|(){}[0] - final fun component1(): kotlin/String // com.apollographql.cache.normalized.sql.internal.blob2/RecordsForKeys.component1|component1(){}[0] - final fun component2(): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.blob2/RecordsForKeys.component2|component2(){}[0] - final fun copy(kotlin/String = ..., kotlin/ByteArray = ...): com.apollographql.cache.normalized.sql.internal.blob2/RecordsForKeys // com.apollographql.cache.normalized.sql.internal.blob2/RecordsForKeys.copy|copy(kotlin.String;kotlin.ByteArray){}[0] - final fun equals(kotlin/Any?): kotlin/Boolean // com.apollographql.cache.normalized.sql.internal.blob2/RecordsForKeys.equals|equals(kotlin.Any?){}[0] - final fun hashCode(): kotlin/Int // com.apollographql.cache.normalized.sql.internal.blob2/RecordsForKeys.hashCode|hashCode(){}[0] - final fun toString(): kotlin/String // com.apollographql.cache.normalized.sql.internal.blob2/RecordsForKeys.toString|toString(){}[0] + final fun component1(): kotlin/String // com.apollographql.cache.normalized.sql.internal.record/SelectRecords.component1|component1(){}[0] + final fun component2(): kotlin/ByteArray // com.apollographql.cache.normalized.sql.internal.record/SelectRecords.component2|component2(){}[0] + final fun 
copy(kotlin/String = ..., kotlin/ByteArray = ...): com.apollographql.cache.normalized.sql.internal.record/SelectRecords // com.apollographql.cache.normalized.sql.internal.record/SelectRecords.copy|copy(kotlin.String;kotlin.ByteArray){}[0] + final fun equals(kotlin/Any?): kotlin/Boolean // com.apollographql.cache.normalized.sql.internal.record/SelectRecords.equals|equals(kotlin.Any?){}[0] + final fun hashCode(): kotlin/Int // com.apollographql.cache.normalized.sql.internal.record/SelectRecords.hashCode|hashCode(){}[0] + final fun toString(): kotlin/String // com.apollographql.cache.normalized.sql.internal.record/SelectRecords.toString|toString(){}[0] } final class com.apollographql.cache.normalized.sql/SqlNormalizedCache : com.apollographql.cache.normalized.api/NormalizedCache { // com.apollographql.cache.normalized.sql/SqlNormalizedCache|null[0] final fun clearAll() // com.apollographql.cache.normalized.sql/SqlNormalizedCache.clearAll|clearAll(){}[0] - final fun dump(): kotlin.collections/Map, kotlin.collections/Map> // com.apollographql.cache.normalized.sql/SqlNormalizedCache.dump|dump(){}[0] - final fun loadRecord(kotlin/String, com.apollographql.cache.normalized.api/CacheHeaders): com.apollographql.cache.normalized.api/Record? 
// com.apollographql.cache.normalized.sql/SqlNormalizedCache.loadRecord|loadRecord(kotlin.String;com.apollographql.cache.normalized.api.CacheHeaders){}[0] - final fun loadRecords(kotlin.collections/Collection, com.apollographql.cache.normalized.api/CacheHeaders): kotlin.collections/Collection // com.apollographql.cache.normalized.sql/SqlNormalizedCache.loadRecords|loadRecords(kotlin.collections.Collection;com.apollographql.cache.normalized.api.CacheHeaders){}[0] + final fun dump(): kotlin.collections/Map, kotlin.collections/Map> // com.apollographql.cache.normalized.sql/SqlNormalizedCache.dump|dump(){}[0] + final fun loadRecord(com.apollographql.cache.normalized.api/CacheKey, com.apollographql.cache.normalized.api/CacheHeaders): com.apollographql.cache.normalized.api/Record? // com.apollographql.cache.normalized.sql/SqlNormalizedCache.loadRecord|loadRecord(com.apollographql.cache.normalized.api.CacheKey;com.apollographql.cache.normalized.api.CacheHeaders){}[0] + final fun loadRecords(kotlin.collections/Collection, com.apollographql.cache.normalized.api/CacheHeaders): kotlin.collections/Collection // com.apollographql.cache.normalized.sql/SqlNormalizedCache.loadRecords|loadRecords(kotlin.collections.Collection;com.apollographql.cache.normalized.api.CacheHeaders){}[0] final fun merge(com.apollographql.cache.normalized.api/Record, com.apollographql.cache.normalized.api/CacheHeaders, com.apollographql.cache.normalized.api/RecordMerger): kotlin.collections/Set // com.apollographql.cache.normalized.sql/SqlNormalizedCache.merge|merge(com.apollographql.cache.normalized.api.Record;com.apollographql.cache.normalized.api.CacheHeaders;com.apollographql.cache.normalized.api.RecordMerger){}[0] final fun merge(kotlin.collections/Collection, com.apollographql.cache.normalized.api/CacheHeaders, com.apollographql.cache.normalized.api/RecordMerger): kotlin.collections/Set // 
com.apollographql.cache.normalized.sql/SqlNormalizedCache.merge|merge(kotlin.collections.Collection;com.apollographql.cache.normalized.api.CacheHeaders;com.apollographql.cache.normalized.api.RecordMerger){}[0] final fun remove(com.apollographql.cache.normalized.api/CacheKey, kotlin/Boolean): kotlin/Boolean // com.apollographql.cache.normalized.sql/SqlNormalizedCache.remove|remove(com.apollographql.cache.normalized.api.CacheKey;kotlin.Boolean){}[0] final fun remove(kotlin.collections/Collection, kotlin/Boolean): kotlin/Int // com.apollographql.cache.normalized.sql/SqlNormalizedCache.remove|remove(kotlin.collections.Collection;kotlin.Boolean){}[0] - final fun remove(kotlin/String): kotlin/Int // com.apollographql.cache.normalized.sql/SqlNormalizedCache.remove|remove(kotlin.String){}[0] + final fun trim(kotlin/Long, kotlin/Float): kotlin/Long // com.apollographql.cache.normalized.sql/SqlNormalizedCache.trim|trim(kotlin.Long;kotlin.Float){}[0] } final const val com.apollographql.cache.normalized.sql/VERSION // com.apollographql.cache.normalized.sql/VERSION|{}VERSION[0] diff --git a/normalized-cache-sqlite-incubating/build.gradle.kts b/normalized-cache-sqlite-incubating/build.gradle.kts index 762a70e9..66cfcca4 100644 --- a/normalized-cache-sqlite-incubating/build.gradle.kts +++ b/normalized-cache-sqlite-incubating/build.gradle.kts @@ -29,17 +29,11 @@ android { testOptions.targetSdk = 30 } - -configure { - databases.create("BlobDatabase") { - packageName.set("com.apollographql.cache.normalized.sql.internal.blob") - schemaOutputDirectory.set(file("sqldelight/blob/schema")) - srcDirs.setFrom("src/commonMain/sqldelight/blob/") - } - databases.create("Blob2Database") { - packageName.set("com.apollographql.cache.normalized.sql.internal.blob2") - schemaOutputDirectory.set(file("sqldelight/blob2/schema")) - srcDirs.setFrom("src/commonMain/sqldelight/blob2/") +sqldelight { + databases.create("SqlRecordDatabase") { + 
packageName.set("com.apollographql.cache.normalized.sql.internal.record") + schemaOutputDirectory.set(file("sqldelight/record/schema")) + srcDirs.setFrom("src/commonMain/sqldelight/record/") } } @@ -93,6 +87,7 @@ kotlin { dependencies { implementation(libs.kotlin.test) implementation(libs.apollo.testing.support) + implementation(project(":test-utils")) } } } diff --git a/normalized-cache-sqlite-incubating/sqldelight/blob2/schema/1.db b/normalized-cache-sqlite-incubating/sqldelight/blob2/schema/1.db deleted file mode 100644 index fdeea28e..00000000 Binary files a/normalized-cache-sqlite-incubating/sqldelight/blob2/schema/1.db and /dev/null differ diff --git a/normalized-cache-sqlite-incubating/sqldelight/blob/schema/1.db b/normalized-cache-sqlite-incubating/sqldelight/record/schema/1.db similarity index 96% rename from normalized-cache-sqlite-incubating/sqldelight/blob/schema/1.db rename to normalized-cache-sqlite-incubating/sqldelight/record/schema/1.db index 17bc7efe..8d1e4af6 100644 Binary files a/normalized-cache-sqlite-incubating/sqldelight/blob/schema/1.db and b/normalized-cache-sqlite-incubating/sqldelight/record/schema/1.db differ diff --git a/normalized-cache-sqlite-incubating/sqldelight/blob/schema/2.db b/normalized-cache-sqlite-incubating/sqldelight/record/schema/2.db similarity index 96% rename from normalized-cache-sqlite-incubating/sqldelight/blob/schema/2.db rename to normalized-cache-sqlite-incubating/sqldelight/record/schema/2.db index 17bc7efe..8d1e4af6 100644 Binary files a/normalized-cache-sqlite-incubating/sqldelight/blob/schema/2.db and b/normalized-cache-sqlite-incubating/sqldelight/record/schema/2.db differ diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCache.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCache.kt index 2420e2ef..882c8c82 100644 --- 
a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCache.kt +++ b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCache.kt @@ -2,7 +2,6 @@ package com.apollographql.cache.normalized.sql import com.apollographql.apollo.exception.apolloExceptionHandler import com.apollographql.cache.normalized.api.ApolloCacheHeaders -import com.apollographql.cache.normalized.api.ApolloCacheHeaders.EVICT_AFTER_READ import com.apollographql.cache.normalized.api.CacheHeaders import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.NormalizedCache @@ -17,66 +16,29 @@ class SqlNormalizedCache internal constructor( private val recordDatabase: RecordDatabase, ) : NormalizedCache { - private fun maybeTransaction(condition: Boolean, block: () -> T): T { - return if (condition) { - recordDatabase.transaction { - block() - } - } else { - block() - } - } - - override fun loadRecord(key: String, cacheHeaders: CacheHeaders): Record? { - if (cacheHeaders.hasHeader(ApolloCacheHeaders.MEMORY_CACHE_ONLY)) { - return null - } - val evictAfterRead = cacheHeaders.hasHeader(EVICT_AFTER_READ) - return maybeTransaction(evictAfterRead) { - try { - recordDatabase.select(key) - } catch (e: Exception) { - // Unable to read the record from the database, it is possibly corrupted - treat this as a cache miss - apolloExceptionHandler(Exception("Unable to read a record from the database", e)) - null - }?.also { - if (evictAfterRead) { - recordDatabase.delete(key) - } - } - } + override fun loadRecord(key: CacheKey, cacheHeaders: CacheHeaders): Record? 
{ + return loadRecords(keys = listOf(key), cacheHeaders = cacheHeaders).firstOrNull() } - override fun loadRecords(keys: Collection, cacheHeaders: CacheHeaders): Collection { + override fun loadRecords(keys: Collection, cacheHeaders: CacheHeaders): Collection { if (cacheHeaders.hasHeader(ApolloCacheHeaders.MEMORY_CACHE_ONLY)) { return emptyList() } - val evictAfterRead = cacheHeaders.hasHeader(EVICT_AFTER_READ) - return maybeTransaction(evictAfterRead) { - try { - internalGetRecords(keys) - } catch (e: Exception) { - // Unable to read the records from the database, it is possibly corrupted - treat this as a cache miss - apolloExceptionHandler(Exception("Unable to read records from the database", e)) - emptyList() - }.also { - if (evictAfterRead) { - it.forEach { record -> - recordDatabase.delete(record.key) - } - } - } + return try { + selectRecords(keys) + } catch (e: Exception) { + // Unable to read the records from the database, it is possibly corrupted - treat this as a cache miss + apolloExceptionHandler(Exception("Unable to read records from the database", e)) + emptyList() } } override fun clearAll() { - recordDatabase.deleteAll() + recordDatabase.deleteAllRecords() } override fun remove(cacheKey: CacheKey, cascade: Boolean): Boolean { - return recordDatabase.transaction { - internalDeleteRecords(setOf(cacheKey.key), cascade) > 0 - } + return remove(cacheKeys = listOf(cacheKey), cascade = cascade) > 0 } override fun remove(cacheKeys: Collection, cascade: Boolean): Int { @@ -85,24 +47,8 @@ class SqlNormalizedCache internal constructor( } } - override fun remove(pattern: String): Int { - return recordDatabase.transaction { - recordDatabase.deleteMatching(pattern) - recordDatabase.changes().toInt() - } - } - override fun merge(record: Record, cacheHeaders: CacheHeaders, recordMerger: RecordMerger): Set { - if (cacheHeaders.hasHeader(ApolloCacheHeaders.DO_NOT_STORE) || cacheHeaders.hasHeader(ApolloCacheHeaders.MEMORY_CACHE_ONLY)) { - return emptySet() - } - 
return try { - internalUpdateRecord(record = record, cacheHeaders = cacheHeaders, recordMerger = recordMerger) - } catch (e: Exception) { - // Unable to merge the record in the database, it is possibly corrupted - treat this as a cache miss - apolloExceptionHandler(Exception("Unable to merge a record from the database", e)) - emptySet() - } + return merge(records = listOf(record), cacheHeaders = cacheHeaders, recordMerger = recordMerger) } override fun merge(records: Collection, cacheHeaders: CacheHeaders, recordMerger: RecordMerger): Set { @@ -113,24 +59,28 @@ class SqlNormalizedCache internal constructor( internalUpdateRecords(records = records, cacheHeaders = cacheHeaders, recordMerger = recordMerger) } catch (e: Exception) { // Unable to merge the records in the database, it is possibly corrupted - treat this as a cache miss - apolloExceptionHandler(Exception("Unable to merge records from the database", e)) + apolloExceptionHandler(Exception("Unable to merge records into the database", e)) emptySet() } } - override fun dump(): Map, Map> { - return mapOf(this::class to recordDatabase.selectAll().associateBy { it.key }) + override fun dump(): Map, Map> { + return mapOf(this::class to recordDatabase.selectAllRecords().associateBy { it.key }) } - private fun getReferencedKeysRecursively(keys: Collection, visited: MutableSet = mutableSetOf()): Set { + private fun getReferencedKeysRecursively( + keys: Collection, + visited: MutableSet = mutableSetOf(), + ): Set { if (keys.isEmpty()) return emptySet() - val referencedKeys = recordDatabase.select(keys - visited).flatMap { it.referencedFields() }.map { it.key }.toSet() + val referencedKeys = + recordDatabase.selectRecords(keys - visited).flatMap { it.referencedFields() }.map { it.key }.toSet() visited += keys return referencedKeys + getReferencedKeysRecursively(referencedKeys, visited) } /** - * Assume an enclosing transaction + * Assumes an enclosing transaction */ private fun internalDeleteRecords(keys: Collection, 
cascade: Boolean): Int { val referencedKeys = if (cascade) { @@ -139,70 +89,57 @@ class SqlNormalizedCache internal constructor( emptySet() } return (keys + referencedKeys).chunked(999).sumOf { chunkedKeys -> - recordDatabase.delete(chunkedKeys) + recordDatabase.deleteRecords(chunkedKeys) recordDatabase.changes().toInt() } } /** - * Update records, loading the previous ones - * - * This is an optimization over [internalUpdateRecord] + * Updates records. + * The [records] are merged using the given [recordMerger], requiring to load the existing records from the db first. */ private fun internalUpdateRecords(records: Collection, cacheHeaders: CacheHeaders, recordMerger: RecordMerger): Set { - var updatedRecordKeys: Set = emptySet() val receivedDate = cacheHeaders.headerValue(ApolloCacheHeaders.RECEIVED_DATE) val expirationDate = cacheHeaders.headerValue(ApolloCacheHeaders.EXPIRATION_DATE) - recordDatabase.transaction { - val oldRecords = internalGetRecords( - keys = records.map { it.key }, - ).associateBy { it.key } - - updatedRecordKeys = records.flatMap { record -> - val oldRecord = oldRecords[record.key] - if (oldRecord == null) { - recordDatabase.insert(record.withDates(receivedDate = receivedDate, expirationDate = expirationDate)) + return recordDatabase.transaction { + val existingRecords = selectRecords(records.map { it.key }).associateBy { it.key } + records.flatMap { record -> + val existingRecord = existingRecords[record.key] + if (existingRecord == null) { + recordDatabase.insertOrUpdateRecord(record.withDates(receivedDate = receivedDate, expirationDate = expirationDate)) record.fieldKeys() } else { - val (mergedRecord, changedKeys) = recordMerger.merge(RecordMergerContext(existing = oldRecord, incoming = record, cacheHeaders = cacheHeaders)) + val (mergedRecord, changedKeys) = recordMerger.merge(RecordMergerContext(existing = existingRecord, incoming = record.withDates(receivedDate = receivedDate, expirationDate = expirationDate), cacheHeaders = 
cacheHeaders)) if (mergedRecord.isNotEmpty()) { - recordDatabase.update(mergedRecord.withDates(receivedDate = receivedDate, expirationDate = expirationDate)) + recordDatabase.insertOrUpdateRecord(mergedRecord) } changedKeys } }.toSet() } - return updatedRecordKeys } /** - * Update a single [Record], loading the previous one + * Loads a list of records, making sure to not query more than 999 at a time + * to help with the SQLite limitations */ - private fun internalUpdateRecord(record: Record, cacheHeaders: CacheHeaders, recordMerger: RecordMerger): Set { - val receivedDate = cacheHeaders.headerValue(ApolloCacheHeaders.RECEIVED_DATE) - val expirationDate = cacheHeaders.headerValue(ApolloCacheHeaders.EXPIRATION_DATE) - return recordDatabase.transaction { - val oldRecord = recordDatabase.select(record.key) - if (oldRecord == null) { - recordDatabase.insert(record.withDates(receivedDate = receivedDate, expirationDate = expirationDate)) - record.fieldKeys() - } else { - val (mergedRecord, changedKeys) = recordMerger.merge(RecordMergerContext(existing = oldRecord, incoming = record, cacheHeaders = cacheHeaders)) - if (mergedRecord.isNotEmpty()) { - recordDatabase.update(mergedRecord.withDates(receivedDate = receivedDate, expirationDate = expirationDate)) + private fun selectRecords(keys: Collection): List { + return keys + .map { it.key } + .chunked(999).flatMap { chunkedKeys -> + recordDatabase.selectRecords(chunkedKeys) } - changedKeys - } - } } - /** - * Loads a list of records, making sure to not query more than 999 at a time - * to help with the SQLite limitations - */ - private fun internalGetRecords(keys: Collection): List { - return keys.chunked(999).flatMap { chunkedKeys -> - recordDatabase.select(chunkedKeys) + override fun trim(maxSizeBytes: Long, trimFactor: Float): Long { + val size = recordDatabase.databaseSize() + return if (size >= maxSizeBytes) { + val count = recordDatabase.count().executeAsOne() + recordDatabase.trimByUpdateDate((count * 
trimFactor).toLong()) + recordDatabase.vacuum() + recordDatabase.databaseSize() + } else { + size } } } diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/Blob2RecordDatabase.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/Blob2RecordDatabase.kt deleted file mode 100644 index 97b2bfad..00000000 --- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/Blob2RecordDatabase.kt +++ /dev/null @@ -1,67 +0,0 @@ -package com.apollographql.cache.normalized.sql.internal - -import com.apollographql.cache.normalized.api.ApolloCacheHeaders -import com.apollographql.cache.normalized.api.Record -import com.apollographql.cache.normalized.sql.internal.blob2.Blob2Queries - -internal class Blob2RecordDatabase(private val blobQueries: Blob2Queries) : RecordDatabase { - override fun select(key: String): Record? { - return blobQueries.recordForKey(key).executeAsList() - .map { - BlobRecordSerializer.deserialize(it.key, it.blob) - } - .singleOrNull() - } - - override fun select(keys: Collection): List { - return blobQueries.recordsForKeys(keys).executeAsList() - .map { - BlobRecordSerializer.deserialize(it.key, it.blob) - } - } - - override fun transaction(noEnclosing: Boolean, body: () -> T): T { - return blobQueries.transactionWithResult { - body() - } - } - - override fun delete(key: String) { - blobQueries.delete(key) - } - - override fun delete(keys: Collection) { - blobQueries.deleteRecords(keys) - } - - override fun deleteMatching(pattern: String) { - blobQueries.deleteRecordsWithKeyMatching(pattern, "\\") - } - - override fun deleteAll() { - blobQueries.deleteAll() - } - - override fun changes(): Long { - return blobQueries.changes().executeAsOne() - } - - override fun insert(record: Record) { - blobQueries.insert(record.key, BlobRecordSerializer.serialize(record), record.receivedDate()) - } - - 
override fun update(record: Record) { - blobQueries.update(BlobRecordSerializer.serialize(record), record.receivedDate(), record.key) - } - - override fun selectAll(): List { - TODO("Not yet implemented") - } - - /** - * The most recent of the fields' received dates. - */ - private fun Record.receivedDate(): Long? { - return metadata.values.mapNotNull { it[ApolloCacheHeaders.RECEIVED_DATE] as? Long }.maxOrNull() - } -} diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/BlobRecordDatabase.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/BlobRecordDatabase.kt deleted file mode 100644 index 99c2b28e..00000000 --- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/BlobRecordDatabase.kt +++ /dev/null @@ -1,61 +0,0 @@ -package com.apollographql.cache.normalized.sql.internal - -import com.apollographql.cache.normalized.api.Record -import com.apollographql.cache.normalized.sql.internal.blob.BlobQueries - -internal class BlobRecordDatabase(private val blobQueries: BlobQueries) : RecordDatabase { - override fun select(key: String): Record? 
{ - return blobQueries.recordForKey(key).executeAsList() - .map { - BlobRecordSerializer.deserialize(it.key, it.blob) - } - .singleOrNull() - } - - override fun select(keys: Collection): List { - return blobQueries.recordsForKeys(keys).executeAsList() - .map { - BlobRecordSerializer.deserialize(it.key, it.blob) - } - } - - override fun transaction(noEnclosing: Boolean, body: () -> T): T { - return blobQueries.transactionWithResult { - body() - } - } - - override fun delete(key: String) { - blobQueries.delete(key) - } - - override fun delete(keys: Collection) { - blobQueries.deleteRecords(keys) - } - - override fun deleteMatching(pattern: String) { - blobQueries.deleteRecordsWithKeyMatching(pattern, "\\") - } - - override fun deleteAll() { - blobQueries.deleteAll() - } - - override fun changes(): Long { - return blobQueries.changes().executeAsOne() - } - - override fun insert(record: Record) { - blobQueries.insert(record.key, BlobRecordSerializer.serialize(record)) - } - - override fun update(record: Record) { - blobQueries.update(BlobRecordSerializer.serialize(record), record.key) - } - - override fun selectAll(): List { - return blobQueries.selectRecords().executeAsList().map { - BlobRecordSerializer.deserialize(it.key, it.blob) - } - } -} diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/BlobRecordSerializer.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/BlobRecordSerializer.kt deleted file mode 100644 index 682e0dea..00000000 --- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/BlobRecordSerializer.kt +++ /dev/null @@ -1,225 +0,0 @@ -package com.apollographql.cache.normalized.sql.internal - -import com.apollographql.apollo.api.Error -import com.apollographql.apollo.api.Error.Builder -import com.apollographql.apollo.api.json.JsonNumber -import 
com.apollographql.cache.normalized.api.CacheKey -import com.apollographql.cache.normalized.api.Record -import okio.Buffer -import okio.utf8Size - -/** - * A serializer that serializes/deserializes a [Record] to a [ByteArray] - * - * It's a very basic implementation that encodes a record like below - * - * number of entries - Int - * ------ - * name of the entry0 - String - * timestamp of entry0 - Long? - * value of entry0 - Any? - * ------ - * name of the entry1 - String - * timestamp of entry1 - Long? - * value of entry1 - Any? - * ------ - * etc... - * - * For each value, the type of the value is encoded using a single identifier byte so that deserialization can deserialize - * to the expected type - * - * This should be revisited/optimized - */ -internal object BlobRecordSerializer { - fun serialize(record: Record): ByteArray { - val buffer = Buffer() - - buffer.writeAny(record.metadata) - val keys = record.fields.keys - buffer.writeInt(keys.size) - for (key in keys) { - buffer.writeString(key) - buffer.writeAny(record.fields[key]) - } - - return buffer.readByteArray() - } - - /** - * returns the [Record] for the given Json - * - * @throws Exception if the [Record] cannot be deserialized - */ - @Suppress("UNCHECKED_CAST") - fun deserialize(key: String, bytes: ByteArray): Record { - val buffer = Buffer().write(bytes) - - val metadata = buffer.readAny() as Map> - - val fields = mutableMapOf() - val size = buffer.readInt() - - for (i in 0.until(size)) { - val name = buffer.readString() - fields[name] = buffer.readAny() - } - - return Record(key, fields, null, metadata) - } - - private fun Buffer.writeString(value: String) { - writeInt(value.utf8Size().toInt()) - writeUtf8(value) - } - - private fun Buffer.readString(): String { - return readUtf8(readInt().toLong()) - } - - private fun Buffer.writeAny(value: Any?) 
{ - when (value) { - is String -> { - buffer.writeByte(STRING) - buffer.writeString(value) - } - - is Int -> { - buffer.writeByte(INT) - buffer.writeInt(value) - } - - is Long -> { - buffer.writeByte(LONG) - buffer.writeLong(value) - } - - is Double -> { - buffer.writeByte(DOUBLE) - buffer.writeLong(value.toBits()) - } - - is JsonNumber -> { - buffer.writeByte(JSON_NUMBER) - buffer.writeString(value.value) - } - - is Boolean -> { - buffer.writeByte(BOOLEAN) - buffer.writeByte(if (value) 1 else 0) - } - - is CacheKey -> { - buffer.writeByte(CACHE_KEY) - buffer.writeString(value.key) - } - - is List<*> -> { - buffer.writeByte(LIST) - buffer.writeInt(value.size) - value.forEach { - buffer.writeAny(it) - } - } - - is Map<*, *> -> { - buffer.writeByte(MAP) - buffer.writeInt(value.size) - @Suppress("UNCHECKED_CAST") - value as Map - value.forEach { - buffer.writeString(it.key) - buffer.writeAny(it.value) - } - } - - null -> { - buffer.writeByte(NULL) - } - - is Error -> { - buffer.writeByte(ERROR) - buffer.writeString(value.message) - buffer.writeInt(value.locations?.size ?: 0) - for (location in value.locations.orEmpty()) { - buffer.writeInt(location.line) - buffer.writeInt(location.column) - } - buffer.writeInt(value.path?.size ?: 0) - for (path in value.path.orEmpty()) { - buffer.writeAny(path) - } - buffer.writeAny(value.extensions) - } - - else -> error("Trying to write unsupported Record value: $value") - } - } - - private fun Buffer.readAny(): Any? 
{ - return when (val what = readByte().toInt()) { - STRING -> readString() - INT -> readInt() - LONG -> readLong() - DOUBLE -> Double.fromBits(readLong()) - JSON_NUMBER -> JsonNumber(readString()) - BOOLEAN -> readByte() > 0 - CACHE_KEY -> { - CacheKey(readString()) - } - - LIST -> { - val size = readInt() - 0.until(size).map { - readAny() - } - } - - MAP -> { - val size = readInt() - 0.until(size).associate { - readString() to readAny() - } - } - - NULL -> null - - ERROR -> { - val message = readString() - val locations = 0.until(readInt()).map { - Error.Location(readInt(), readInt()) - } - val path = 0.until(readInt()).map { - readAny()!! - } - - @Suppress("UNCHECKED_CAST") - val extensions = readAny() as Map? - Builder(message = message) - .path(path) - .apply { - for ((key, value) in extensions.orEmpty()) { - putExtension(key, value) - } - if (locations.isNotEmpty()) { - locations(locations) - } - } - .build() - } - - else -> error("Trying to read unsupported Record value: $what") - } - } - - private const val STRING = 0 - private const val INT = 1 - private const val LONG = 2 - private const val BOOLEAN = 3 - private const val DOUBLE = 4 - private const val JSON_NUMBER = 5 - private const val LIST = 6 - private const val MAP = 7 - private const val CACHE_KEY = 8 - private const val NULL = 9 - private const val ERROR = 10 -} diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordDatabase.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordDatabase.kt index f1f93a37..849806d0 100644 --- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordDatabase.kt +++ b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordDatabase.kt @@ -1,52 +1,69 @@ package com.apollographql.cache.normalized.sql.internal +import 
app.cash.sqldelight.Query +import app.cash.sqldelight.db.QueryResult +import app.cash.sqldelight.db.SqlDriver +import com.apollographql.apollo.mpp.currentTimeMillis import com.apollographql.cache.normalized.api.Record +import com.apollographql.cache.normalized.sql.internal.record.RecordQueries +import com.apollographql.cache.normalized.sql.internal.record.SqlRecordDatabase -/** - * A database that can store [Record] - * - * All calls are synchronous, the calling code is expected to handle threading. - * - */ -internal interface RecordDatabase { - /** - * @return the [Record] of null if there is no record for the given id - */ - fun select(key: String): Record? +internal class RecordDatabase(private val driver: SqlDriver) { + private val recordQueries: RecordQueries = SqlRecordDatabase(driver).recordQueries + + fun transaction(body: () -> T): T { + return recordQueries.transactionWithResult { + body() + } + } /** - * @return the list of records for the given ids - * This is an optimization to avoid doing too many queries. - * - * @param ids the ids to get the record for. [ids.size] must be less than 999 - * @return the [Record] for the ids. 
If some [Record]s are missing, the returned list size might be - * less that [ids] + * @param keys the keys of the records to select, size must be <= 999 */ - fun select(keys: Collection): List + fun selectRecords(keys: Collection): List { + return recordQueries.selectRecords(keys).executeAsList().map { RecordSerializer.deserialize(it.key, it.record) } + } + + fun selectAllRecords(): List { + return recordQueries.selectAllRecords().executeAsList().map { RecordSerializer.deserialize(it.key, it.record) } + } + + fun insertOrUpdateRecord(record: Record) { + recordQueries.insertOrUpdateRecord(key = record.key.key, record = RecordSerializer.serialize(record), update_date = currentTimeMillis()) + } - fun selectAll(): List /** - * executes code in a transaction + * @param keys the keys of the records to delete, size must be <= 999 */ - fun transaction( - noEnclosing: Boolean = false, - body: () -> T, - ): T + fun deleteRecords(keys: Collection) { + recordQueries.deleteRecords(keys) + } - fun delete(key: String) + fun deleteAllRecords() { + recordQueries.deleteAllRecords() + } - fun delete(keys: Collection) + fun databaseSize(): Long { + return driver.executeQuery(null, "SELECT page_count * page_size FROM pragma_page_count(), pragma_page_size();", { + it.next() + QueryResult.Value(it.getLong(0)!!) 
+ }, 0).value + } - fun deleteMatching(pattern: String) + fun count(): Query { + return recordQueries.count() + } - fun deleteAll() + fun trimByUpdateDate(limit: Long) { + recordQueries.trimByUpdateDate(limit) + } - /** - * Returns the number of rows affected by the last query - */ - fun changes(): Long + fun vacuum() { + driver.execute(null, "VACUUM", 0) + } - fun insert(record: Record) - fun update(record: Record) + fun changes(): Long { + return recordQueries.changes().executeAsOne() + } } diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordSerializer.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordSerializer.kt new file mode 100644 index 00000000..79a22f6d --- /dev/null +++ b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/RecordSerializer.kt @@ -0,0 +1,324 @@ +package com.apollographql.cache.normalized.sql.internal + +import com.apollographql.apollo.api.Error +import com.apollographql.apollo.api.Error.Builder +import com.apollographql.apollo.api.json.ApolloJsonElement +import com.apollographql.apollo.api.json.JsonNumber +import com.apollographql.cache.normalized.api.ApolloCacheHeaders +import com.apollographql.cache.normalized.api.CacheKey +import com.apollographql.cache.normalized.api.Record +import com.apollographql.cache.normalized.api.RecordValue +import okio.Buffer +import okio.utf8Size + +/** + * A serializer that serializes/deserializes [RecordValue]s to/from [ByteArray]s. 
+ */ +internal object RecordSerializer { + fun serialize(record: Record): ByteArray { + val buffer = Buffer() + buffer.writeMap(record.fields) + buffer._writeInt(record.metadata.size) + for ((k, v) in record.metadata.mapKeys { (k, _) -> knownMetadataKeys[k] ?: k }) { + buffer.writeString(k) + buffer.writeMap(v) + } + return buffer.readByteArray() + } + + fun deserialize(key: String, bytes: ByteArray): Record { + val buffer = Buffer().write(bytes) + val fields = buffer.readMap() + val metadataSize = buffer._readInt() + val metadata = HashMap>(metadataSize).apply { + repeat(metadataSize) { + val k = buffer.readString() + val v = buffer.readMap() + put(k, v) + } + }.mapKeys { (k, _) -> knownMetadataKeysInverted[k] ?: k } + return Record( + key = CacheKey(key), + fields = fields, + mutationId = null, + metadata = metadata + ) + } + + private fun Buffer.writeString(value: String) { + _writeInt(value.utf8Size().toInt()) + writeUtf8(value) + } + + private fun Buffer.readString(): String { + return readUtf8(_readInt().toLong()) + } + + private fun Buffer._writeInt(value: Int) { + when (value) { + in 0.. 
{ + writeByte(value) + } + + in Byte.MIN_VALUE..Byte.MAX_VALUE -> { + writeByte(INT_BYTE) + writeByte(value.toInt()) + } + + in Short.MIN_VALUE..Short.MAX_VALUE -> { + writeByte(INT_SHORT) + writeShort(value.toInt()) + } + + else -> { + writeByte(INT_INT) + writeInt(value.toInt()) + } + } + } + + private fun Buffer._readInt(): Int { + val what = readByte().toInt() and 0xFF + return when { + what < FIRST -> what + what == INT_BYTE -> readByte().toInt() + what == INT_SHORT -> readShort().toInt() + what == INT_INT -> readInt() + else -> error("Trying to read unsupported Int type: $what") + } + } + + private fun Buffer._writeLong(value: Long) { + when (value) { + 0L -> { + writeByte(LONG_0) + } + + in Byte.MIN_VALUE..Byte.MAX_VALUE -> { + writeByte(LONG_BYTE) + writeByte(value.toInt()) + } + + in Short.MIN_VALUE..Short.MAX_VALUE -> { + writeByte(LONG_SHORT) + writeShort(value.toInt()) + } + + in Int.MIN_VALUE..Int.MAX_VALUE -> { + writeByte(LONG_INT) + writeInt(value.toInt()) + } + + else -> { + writeByte(LONG_LONG) + writeLong(value.toLong()) + } + } + } + + private fun Buffer.writeMap(value: Map<*, *>) { + _writeInt(value.size) + @Suppress("UNCHECKED_CAST") + value as Map + for ((k, v) in value) { + writeString(k) + writeAny(v) + } + } + + private fun Buffer.readMap(): Map { + val size = _readInt() + return HashMap(size).apply { + repeat(size) { + put(readString(), readAny()) + } + } + } + + private fun Buffer.writeAny(value: RecordValue) { + when (value) { + is String -> { + if (value.isEmpty()) { + writeByte(STRING_EMPTY) + } else { + writeByte(STRING) + writeString(value) + } + } + + is Int -> { + _writeInt(value) + } + + is Long -> { + _writeLong(value) + } + + is Double -> { + writeByte(DOUBLE) + writeLong(value.toBits()) + } + + is JsonNumber -> { + writeByte(JSON_NUMBER) + writeString(value.value) + } + + is Boolean -> { + if (value) { + writeByte(BOOLEAN_TRUE) + } else { + writeByte(BOOLEAN_FALSE) + } + } + + is CacheKey -> { + writeByte(CACHE_KEY) + 
writeString(value.key) + } + + is List<*> -> { + if (value.isEmpty()) { + writeByte(LIST_EMPTY) + } else { + writeByte(LIST) + _writeInt(value.size) + value.forEach { + writeAny(it) + } + } + } + + is Map<*, *> -> { + if (value.isEmpty()) { + writeByte(MAP_EMPTY) + } else { + writeByte(MAP) + writeMap(value) + } + } + + null -> { + writeByte(NULL) + } + + is Error -> { + writeByte(ERROR) + writeString(value.message) + _writeInt(value.locations?.size ?: 0) + for (location in value.locations.orEmpty()) { + _writeInt(location.line) + _writeInt(location.column) + } + _writeInt(value.path?.size ?: 0) + for (path in value.path.orEmpty()) { + writeAny(path) + } + writeAny(value.extensions) + } + + else -> error("Trying to write unsupported Record value: $value") + } + } + + private fun Buffer.readAny(): RecordValue { + val what = readByte().toInt() and 0xFF + return if (what < FIRST) { + what + } else { + when (what) { + STRING -> readString() + STRING_EMPTY -> "" + INT_BYTE -> readByte().toInt() + INT_SHORT -> readShort().toInt() + INT_INT -> readInt() + LONG_0 -> 0L + LONG_BYTE -> readByte().toLong() + LONG_SHORT -> readShort().toLong() + LONG_INT -> readInt().toLong() + LONG_LONG -> readLong() + DOUBLE -> Double.fromBits(readLong()) + JSON_NUMBER -> JsonNumber(readString()) + BOOLEAN_TRUE -> true + BOOLEAN_FALSE -> false + CACHE_KEY -> { + CacheKey(readString()) + } + + LIST -> { + val size = _readInt() + 0.until(size).map { + readAny() + } + } + + LIST_EMPTY -> emptyList() + + MAP -> { + readMap() + } + + MAP_EMPTY -> emptyMap() + + NULL -> null + + ERROR -> { + val message = readString() + val locations = 0.until(_readInt()).map { + Error.Location(_readInt(), _readInt()) + } + val path = 0.until(_readInt()).map { + readAny()!! + } + + @Suppress("UNCHECKED_CAST") + val extensions = readAny() as Map? 
+ Builder(message = message) + .path(path) + .apply { + for ((key, value) in extensions.orEmpty()) { + putExtension(key, value) + } + if (locations.isNotEmpty()) { + locations(locations) + } + } + .build() + } + + else -> error("Trying to read unsupported Record type: $what") + } + } + } + + private const val FIRST = 255 - 32 + + private const val NULL = FIRST + private const val STRING = FIRST + 1 + private const val STRING_EMPTY = FIRST + 2 + private const val INT_BYTE = FIRST + 3 + private const val INT_SHORT = FIRST + 4 + private const val INT_INT = FIRST + 5 + private const val LONG_0 = FIRST + 6 + private const val LONG_BYTE = FIRST + 7 + private const val LONG_SHORT = FIRST + 8 + private const val LONG_INT = FIRST + 9 + private const val LONG_LONG = FIRST + 10 + private const val BOOLEAN_TRUE = FIRST + 11 + private const val BOOLEAN_FALSE = FIRST + 12 + private const val DOUBLE = FIRST + 13 + private const val JSON_NUMBER = FIRST + 14 + private const val LIST = FIRST + 15 + private const val LIST_EMPTY = FIRST + 16 + private const val MAP = FIRST + 17 + private const val MAP_EMPTY = FIRST + 18 + private const val CACHE_KEY = FIRST + 19 + private const val ERROR = FIRST + 20 + + // Encode certain known metadata keys as single byte strings to save space + private val knownMetadataKeys = mapOf( + ApolloCacheHeaders.RECEIVED_DATE to "0", + ApolloCacheHeaders.EXPIRATION_DATE to "1", + ) + private val knownMetadataKeysInverted = knownMetadataKeys.entries.associate { (k, v) -> v to k } +} diff --git a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/factoryHelpers.kt b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/factoryHelpers.kt index 0fbdfe92..8892a1be 100644 --- a/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/factoryHelpers.kt +++ 
b/normalized-cache-sqlite-incubating/src/commonMain/kotlin/com/apollographql/cache/normalized/sql/internal/factoryHelpers.kt @@ -4,7 +4,7 @@ import app.cash.sqldelight.db.QueryResult import app.cash.sqldelight.db.SqlDriver import app.cash.sqldelight.db.SqlSchema import com.apollographql.apollo.exception.apolloExceptionHandler -import com.apollographql.cache.normalized.sql.internal.blob.BlobDatabase +import com.apollographql.cache.normalized.sql.internal.record.SqlRecordDatabase internal fun createRecordDatabase(driver: SqlDriver): RecordDatabase { maybeCreateOrMigrateSchema(driver, getSchema()) @@ -30,12 +30,20 @@ internal fun createRecordDatabase(driver: SqlDriver): RecordDatabase { */ } - val expectedTableName = "blobs" + val expectedTableName = "record" check(tableNames.isEmpty() || tableNames.contains(expectedTableName)) { "Apollo: Cannot find the '$expectedTableName' table (found '$tableNames' instead)" } - return BlobRecordDatabase(BlobDatabase(driver).blobQueries) + try { + // Increase the memory cache to 8 MiB + // https://www.sqlite.org/pragma.html#pragma_cache_size + driver.executeQuery(null, "PRAGMA cache_size = -8192;", { QueryResult.Unit }, 0) + } catch (_: Exception) { + // Not supported on all platforms, ignore + } + + return RecordDatabase(driver) } -internal fun getSchema(): SqlSchema> = BlobDatabase.Schema +internal fun getSchema(): SqlSchema> = SqlRecordDatabase.Schema diff --git a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/blob/com/apollographql/cache/normalized/sql/internal/blob/blob.sq b/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/blob/com/apollographql/cache/normalized/sql/internal/blob/blob.sq deleted file mode 100644 index e82a4938..00000000 --- a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/blob/com/apollographql/cache/normalized/sql/internal/blob/blob.sq +++ /dev/null @@ -1,35 +0,0 @@ -CREATE TABLE blobs ( - key TEXT NOT NULL PRIMARY KEY, - blob BLOB NOT NULL -) WITHOUT ROWID; - 
-recordForKey: -SELECT key, blob FROM blobs WHERE key=?; - -recordsForKeys: -SELECT key, blob FROM blobs WHERE key IN ?; - -insert: -INSERT INTO blobs (key, blob) VALUES (?,?); - -update: -UPDATE blobs SET blob=:blob WHERE key=:key; - -delete: -DELETE FROM blobs WHERE key=?; - -deleteRecords: -DELETE FROM blobs WHERE key IN ?; - -deleteRecordsWithKeyMatching: -DELETE FROM blobs WHERE key LIKE ? ESCAPE ?; - --- use only for debug -selectRecords: -SELECT * FROM blobs; - -changes: -SELECT changes(); - -deleteAll: -DELETE FROM blobs; diff --git a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/blob/migrations/1.sqm b/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/blob/migrations/1.sqm deleted file mode 100644 index f57acba3..00000000 --- a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/blob/migrations/1.sqm +++ /dev/null @@ -1,7 +0,0 @@ --- Version 1 is either the blob schema (do nothing) or the legacy json schema (drop and create) -DROP TABLE IF EXISTS records; - -CREATE TABLE IF NOT EXISTS blobs ( - key TEXT NOT NULL PRIMARY KEY, - blob BLOB NOT NULL -) WITHOUT ROWID; diff --git a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/blob2/com/apollographql/cache/normalized/sql/internal/blob2/blob2.sq b/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/blob2/com/apollographql/cache/normalized/sql/internal/blob2/blob2.sq deleted file mode 100644 index 80f8da29..00000000 --- a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/blob2/com/apollographql/cache/normalized/sql/internal/blob2/blob2.sq +++ /dev/null @@ -1,45 +0,0 @@ -CREATE TABLE blobs ( - key TEXT NOT NULL PRIMARY KEY, - blob BLOB NOT NULL, - date INTEGER -) WITHOUT ROWID; - -CREATE INDEX date_idx -ON blobs(date); - -recordForKey: -SELECT key, blob FROM blobs WHERE key=?; - -recordsForKeys: -SELECT key, blob FROM blobs WHERE key IN ?; - -insert: -INSERT INTO blobs (key, blob, date) VALUES (?,?, ?); - -update: -UPDATE blobs SET blob=:blob, 
date=:date WHERE key=:key; - -delete: -DELETE FROM blobs WHERE key=?; - -deleteRecords: -DELETE FROM blobs WHERE key IN ?; - -deleteRecordsWithKeyMatching: -DELETE FROM blobs WHERE key LIKE ? ESCAPE ?; - --- use only for debug -selectRecords: -SELECT * FROM blobs; - -changes: -SELECT changes(); - -deleteAll: -DELETE FROM blobs; - -count: -SELECT COUNT(*) FROM blobs; - -trim: -DELETE FROM blobs WHERE key IN (SELECT key FROM blobs ORDER BY date LIMIT ?); diff --git a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/record/com/apollographql/cache/normalized/sql/internal/record/record.sq b/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/record/com/apollographql/cache/normalized/sql/internal/record/record.sq new file mode 100644 index 00000000..141dc343 --- /dev/null +++ b/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/record/com/apollographql/cache/normalized/sql/internal/record/record.sq @@ -0,0 +1,31 @@ +CREATE TABLE record ( + key TEXT NOT NULL, + record BLOB NOT NULL, + update_date INTEGER NOT NULL, + PRIMARY KEY (key) ON CONFLICT REPLACE +) +WITHOUT ROWID; + +selectRecords: +SELECT key, record FROM record WHERE key IN ?; + +selectAllRecords: +SELECT key, record FROM record; + +insertOrUpdateRecord: +INSERT INTO record (key, record, update_date) VALUES (?, ?, ?); + +deleteRecords: +DELETE FROM record WHERE key IN ?; + +deleteAllRecords: +DELETE FROM record; + +count: +SELECT count(*) FROM record; + +trimByUpdateDate: +DELETE FROM record WHERE key IN (SELECT key FROM record ORDER BY update_date LIMIT ?); + +changes: +SELECT changes(); diff --git a/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/record/com/migrations/1.sqm b/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/record/com/migrations/1.sqm new file mode 100644 index 00000000..d3a6425a --- /dev/null +++ b/normalized-cache-sqlite-incubating/src/commonMain/sqldelight/record/com/migrations/1.sqm @@ -0,0 +1,10 @@ +-- Version 1 is either the record 
schema (do nothing) or the legacy json schema (drop and create) +DROP TABLE IF EXISTS records; + +CREATE TABLE IF NOT EXISTS record ( + key TEXT NOT NULL, + record BLOB NOT NULL, + update_date INTEGER NOT NULL, + PRIMARY KEY (key) ON CONFLICT REPLACE +) +WITHOUT ROWID; diff --git a/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCacheTest.kt b/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCacheTest.kt index 0ca8f53e..b8835714 100644 --- a/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCacheTest.kt +++ b/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/SqlNormalizedCacheTest.kt @@ -13,8 +13,8 @@ import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.DefaultRecordMerger import com.apollographql.cache.normalized.api.NormalizedCache import com.apollographql.cache.normalized.api.Record -import com.apollographql.cache.normalized.sql.internal.BlobRecordDatabase -import com.apollographql.cache.normalized.sql.internal.blob.BlobQueries +import com.apollographql.cache.normalized.sql.internal.RecordDatabase +import com.apollographql.cache.normalized.testing.fieldKey import kotlin.test.BeforeTest import kotlin.test.Test import kotlin.test.assertEquals @@ -108,7 +108,7 @@ class SqlNormalizedCacheTest { cacheHeaders = CacheHeaders.NONE, recordMerger = DefaultRecordMerger, ) - cache.remove(cacheKey = CacheKey(STANDARD_KEY), cascade = false) + cache.remove(cacheKey = STANDARD_KEY, cascade = false) val record = cache.loadRecord(STANDARD_KEY, CacheHeaders.NONE) assertNull(record) } @@ -122,35 +122,6 @@ class SqlNormalizedCacheTest { assertNull(cache.loadRecord(STANDARD_KEY, CacheHeaders.NONE)) } - // Tests for StandardCacheHeader compliance - @Test - fun testHeader_evictAfterRead() { - 
createRecord(STANDARD_KEY) - val record = cache.loadRecord(STANDARD_KEY, CacheHeaders.builder() - .addHeader(ApolloCacheHeaders.EVICT_AFTER_READ, "true").build() - ) - assertNotNull(record) - val nullRecord = cache.loadRecord(STANDARD_KEY, CacheHeaders.builder() - .addHeader(ApolloCacheHeaders.EVICT_AFTER_READ, "true").build() - ) - assertNull(nullRecord) - } - - @Test - fun testHeader_evictAfterBatchRead() { - createRecord(STANDARD_KEY) - createRecord(QUERY_ROOT_KEY) - val selectionSet = setOf(STANDARD_KEY, QUERY_ROOT_KEY) - val records = cache.loadRecords(selectionSet, CacheHeaders.builder() - .addHeader(ApolloCacheHeaders.EVICT_AFTER_READ, "true").build() - ) - assertEquals(records.size, 2) - val emptyRecords = cache.loadRecords(selectionSet, CacheHeaders.builder() - .addHeader(ApolloCacheHeaders.EVICT_AFTER_READ, "true").build() - ) - assertTrue(emptyRecords.isEmpty()) - } - @Test fun testHeader_noCache() { cache.merge( @@ -180,7 +151,7 @@ class SqlNormalizedCacheTest { ) val record = cache.loadRecord(STANDARD_KEY, CacheHeaders.NONE) assertNotNull(record) - assertEquals(expected = setOf("$STANDARD_KEY.fieldKey", "$STANDARD_KEY.newFieldKey"), actual = changedKeys) + assertEquals(expected = setOf(STANDARD_KEY.fieldKey("fieldKey"), STANDARD_KEY.fieldKey("newFieldKey")), actual = changedKeys) assertEquals(expected = "valueUpdated", actual = record.fields["fieldKey"]) assertEquals(expected = true, actual = record.fields["newFieldKey"]) } @@ -205,36 +176,16 @@ class SqlNormalizedCacheTest { assertEquals(expected = true, actual = record.fields["newFieldKey"]) } - @Test - fun testPatternRemove() { - createRecord("specialKey1") - createRecord("specialKey2") - createRecord("regularKey1") - - cache.remove("specialKey%") - assertNull(cache.loadRecord("specialKey1", CacheHeaders.NONE)) - assertNull(cache.loadRecord("specialKey1", CacheHeaders.NONE)) - assertNotNull(cache.loadRecord("regularKey1", CacheHeaders.NONE)) - } - - @Test - fun testPatternRemoveWithEscape() { - 
createRecord("%1") - - cache.remove("\\%%") - assertNull(cache.loadRecord("%1", CacheHeaders.NONE)) - } - @Test fun exceptionCallsExceptionHandler() { - val badCache = SqlNormalizedCache(BlobRecordDatabase(BlobQueries(BadDriver))) + val badCache = SqlNormalizedCache(RecordDatabase(BadDriver)) var throwable: Throwable? = null apolloExceptionHandler = { throwable = it } badCache.loadRecord(STANDARD_KEY, CacheHeaders.NONE) - assertEquals("Unable to read a record from the database", throwable!!.message) + assertEquals("Unable to read records from the database", throwable!!.message) assertEquals("bad cache", throwable!!.cause!!.message) throwable = null @@ -249,7 +200,7 @@ class SqlNormalizedCacheTest { cacheHeaders = CacheHeaders.NONE, recordMerger = DefaultRecordMerger, ) - assertEquals("Unable to merge a record from the database", throwable!!.message) + assertEquals("Unable to merge records into the database", throwable!!.message) assertEquals("bad cache", throwable!!.cause!!.message) } @@ -258,7 +209,7 @@ class SqlNormalizedCacheTest { // Creating a self-referencing record cache.merge( record = Record( - key = "selfRefKey", + key = CacheKey("selfRefKey"), fields = mapOf( "field1" to "value1", "selfRef" to CacheKey("selfRefKey"), @@ -271,7 +222,7 @@ class SqlNormalizedCacheTest { val result = cache.remove(cacheKey = CacheKey("selfRefKey"), cascade = true) assertTrue(result) - val record = cache.loadRecord("selfRefKey", CacheHeaders.NONE) + val record = cache.loadRecord(CacheKey("selfRefKey"), CacheHeaders.NONE) assertNull(record) } @@ -280,7 +231,7 @@ class SqlNormalizedCacheTest { // Creating two records that reference each other cache.merge( record = Record( - key = "key1", + key = CacheKey("key1"), fields = mapOf( "field1" to "value1", "refToKey2" to CacheKey("key2"), @@ -292,7 +243,7 @@ class SqlNormalizedCacheTest { cache.merge( record = Record( - key = "key2", + key = CacheKey("key2"), fields = mapOf( "field1" to "value2", "refToKey1" to CacheKey("key1"), @@ 
-305,8 +256,8 @@ class SqlNormalizedCacheTest { val result = cache.remove(cacheKey = CacheKey("key1"), cascade = true) assertTrue(result) - assertNull(cache.loadRecord("key1", CacheHeaders.NONE)) - assertNull(cache.loadRecord("key2", CacheHeaders.NONE)) + assertNull(cache.loadRecord(CacheKey("key1"), CacheHeaders.NONE)) + assertNull(cache.loadRecord(CacheKey("key2"), CacheHeaders.NONE)) } private val BadDriver = object : SqlDriver { @@ -349,7 +300,7 @@ class SqlNormalizedCacheTest { } } - private fun createRecord(key: String) { + private fun createRecord(key: CacheKey) { cache.merge( record = Record( key = key, @@ -364,7 +315,7 @@ class SqlNormalizedCacheTest { } companion object { - const val STANDARD_KEY = "key" - const val QUERY_ROOT_KEY = "QUERY_ROOT" + val STANDARD_KEY = CacheKey("key") + val QUERY_ROOT_KEY = CacheKey.rootKey() } } diff --git a/normalized-cache-sqlite-incubating/src/jvmTest/kotlin/com/apollographql/cache/normalized/sql/TrimTest.kt b/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/TrimTest.kt similarity index 53% rename from normalized-cache-sqlite-incubating/src/jvmTest/kotlin/com/apollographql/cache/normalized/sql/TrimTest.kt rename to normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/TrimTest.kt index 970f7e54..af7b2604 100644 --- a/normalized-cache-sqlite-incubating/src/jvmTest/kotlin/com/apollographql/cache/normalized/sql/TrimTest.kt +++ b/normalized-cache-sqlite-incubating/src/commonTest/kotlin/com/apollographql/cache/normalized/sql/TrimTest.kt @@ -1,52 +1,48 @@ package com.apollographql.cache.normalized.sql +import com.apollographql.cache.normalized.ApolloStore import com.apollographql.cache.normalized.api.CacheHeaders +import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.DefaultRecordMerger import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.api.withDates 
-import org.junit.Test -import java.io.File +import kotlin.test.Test import kotlin.test.assertEquals import kotlin.test.assertNull class TrimTest { @Test fun trimTest() { - val dbName = "build/test.db" - val dbUrl = "jdbc:sqlite:$dbName" - val dbFile = File(dbName) - - dbFile.delete() - - val cache = TrimmableNormalizedCacheFactory(dbUrl).create() + val apolloStore = ApolloStore(SqlNormalizedCacheFactory()).also { it.clearAll() } val largeString = "".padStart(1024, '?') val oldRecord = Record( - key = "old", + key = CacheKey("old"), fields = mapOf("key" to "value"), mutationId = null, metadata = emptyMap() - ).withDates(receivedDate = "0", expirationDate = null) - cache.merge(oldRecord, CacheHeaders.NONE, recordMerger = DefaultRecordMerger) + ) + apolloStore.accessCache { it.merge(oldRecord, CacheHeaders.NONE, recordMerger = DefaultRecordMerger) } val newRecords = 0.until(2 * 1024).map { Record( - key = "new$it", + key = CacheKey("new$it"), fields = mapOf("key" to largeString), mutationId = null, metadata = emptyMap() ).withDates(receivedDate = it.toString(), expirationDate = null) } - cache.merge(newRecords, CacheHeaders.NONE, recordMerger = DefaultRecordMerger) + apolloStore.accessCache { it.merge(newRecords, CacheHeaders.NONE, recordMerger = DefaultRecordMerger) } - assertEquals(9596928, dbFile.length()) + val sizeBeforeTrim = apolloStore.trim(-1) + assertEquals(8515584, sizeBeforeTrim) // Trim the cache by 10% - val trimmedCache = TrimmableNormalizedCacheFactory(dbUrl, 9596928, 0.1f).create() + val sizeAfterTrim = apolloStore.trim(8515584, 0.1f) - assertEquals(8548352, dbFile.length()) + assertEquals(7667712, sizeAfterTrim) // The oldest key must have been removed - assertNull(trimmedCache.loadRecord("old", CacheHeaders.NONE)) + assertNull(apolloStore.accessCache { it.loadRecord(CacheKey("old"), CacheHeaders.NONE) }) } } diff --git a/normalized-cache-sqlite-incubating/src/jvmMain/kotlin/com/apollographql/cache/normalized/sql/TrimmableNormalizedCacheFactory.kt 
b/normalized-cache-sqlite-incubating/src/jvmMain/kotlin/com/apollographql/cache/normalized/sql/TrimmableNormalizedCacheFactory.kt deleted file mode 100644 index 10936e1a..00000000 --- a/normalized-cache-sqlite-incubating/src/jvmMain/kotlin/com/apollographql/cache/normalized/sql/TrimmableNormalizedCacheFactory.kt +++ /dev/null @@ -1,48 +0,0 @@ -package com.apollographql.cache.normalized.sql - -import app.cash.sqldelight.driver.jdbc.sqlite.JdbcSqliteDriver -import com.apollographql.cache.normalized.api.NormalizedCache -import com.apollographql.cache.normalized.api.NormalizedCacheFactory -import com.apollographql.cache.normalized.sql.internal.Blob2RecordDatabase -import com.apollographql.cache.normalized.sql.internal.blob2.Blob2Database -import com.apollographql.cache.normalized.sql.internal.maybeCreateOrMigrateSchema -import java.io.File - -/** - * Experimental database that supports trimming at startup - * - * There are no backward compatibilities, DO NOT ship in a production app - * - * @param url Database connection URL in the form of `jdbc:sqlite:path` where `path` is either blank - * @param maxSize if the size of the database is bigger than [maxSize] (in bytes), it will be trimmed - * @param trimFactor the amount of trimming to do - */ -class TrimmableNormalizedCacheFactory internal constructor( - private val url: String, - private val maxSize: Long? 
= null, - private val trimFactor: Float = 0.1f, -) : NormalizedCacheFactory() { - private val driver = JdbcSqliteDriver(url) - - override fun create(): NormalizedCache { - maybeCreateOrMigrateSchema(driver, Blob2Database.Schema) - - val database = Blob2Database(driver) - val queries = database.blob2Queries - if (maxSize != null) { - val path = url.substringAfter("jdbc:sqlite:") - if (path.isNotBlank()) { - val size = File(path).length() - if (size >= maxSize) { - val count = queries.count().executeAsOne() - queries.trim((count * trimFactor).toLong()) - driver.execute(null, "VACUUM", 0) - } - } - } - - return SqlNormalizedCache(Blob2RecordDatabase(queries)) - } -} - - diff --git a/settings.gradle.kts b/settings.gradle.kts index b85c4ce8..e6f2a61e 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -8,4 +8,5 @@ include( "normalized-cache-incubating", "normalized-cache-sqlite-incubating", "normalized-cache-apollo-compiler-plugin", + "test-utils", ) diff --git a/test-utils/README.md b/test-utils/README.md new file mode 100644 index 00000000..bcf6f6c2 --- /dev/null +++ b/test-utils/README.md @@ -0,0 +1,3 @@ +# Module test-utils + +A few testing utilities (not published). 
diff --git a/test-utils/api/test-utils.api b/test-utils/api/test-utils.api new file mode 100644 index 00000000..deee5231 --- /dev/null +++ b/test-utils/api/test-utils.api @@ -0,0 +1,6 @@ +public final class com/apollographql/cache/normalized/testing/CacheKeyKt { + public static final fun append-eNSUWrY (Ljava/lang/String;[Ljava/lang/String;)Ljava/lang/String; + public static final fun fieldKey-eNSUWrY (Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String; + public static final fun keyToString-pWl1Des (Ljava/lang/String;)Ljava/lang/String; +} + diff --git a/test-utils/api/test-utils.klib.api b/test-utils/api/test-utils.klib.api new file mode 100644 index 00000000..a99a94d6 --- /dev/null +++ b/test-utils/api/test-utils.klib.api @@ -0,0 +1,11 @@ +// Klib ABI Dump +// Targets: [iosArm64, iosSimulatorArm64, iosX64, js, macosArm64, macosX64, tvosArm64, tvosSimulatorArm64, tvosX64, wasmJs, watchosArm32, watchosArm64, watchosSimulatorArm64] +// Rendering settings: +// - Signature version: 2 +// - Show manifest properties: true +// - Show declarations: true + +// Library unique name: +final fun (com.apollographql.cache.normalized.api/CacheKey).com.apollographql.cache.normalized.testing/append(kotlin/Array...): com.apollographql.cache.normalized.api/CacheKey // com.apollographql.cache.normalized.testing/append|append@com.apollographql.cache.normalized.api.CacheKey(kotlin.Array...){}[0] +final fun (com.apollographql.cache.normalized.api/CacheKey).com.apollographql.cache.normalized.testing/fieldKey(kotlin/String): kotlin/String // com.apollographql.cache.normalized.testing/fieldKey|fieldKey@com.apollographql.cache.normalized.api.CacheKey(kotlin.String){}[0] +final fun (com.apollographql.cache.normalized.api/CacheKey).com.apollographql.cache.normalized.testing/keyToString(): kotlin/String // com.apollographql.cache.normalized.testing/keyToString|keyToString@com.apollographql.cache.normalized.api.CacheKey(){}[0] diff --git a/test-utils/build.gradle.kts 
b/test-utils/build.gradle.kts new file mode 100644 index 00000000..06538cc8 --- /dev/null +++ b/test-utils/build.gradle.kts @@ -0,0 +1,21 @@ +plugins { + id("org.jetbrains.kotlin.multiplatform") +} + +group = "com.apollographql.cache" + +kotlin { + configureKmp( + withJs = true, + withWasm = true, + withAndroid = false, + ) + + sourceSets { + getByName("commonMain") { + dependencies { + api(project(":normalized-cache-incubating")) + } + } + } +} diff --git a/test-utils/src/commonMain/kotlin/com/apollographql/cache/normalized/testing/CacheKey.kt b/test-utils/src/commonMain/kotlin/com/apollographql/cache/normalized/testing/CacheKey.kt new file mode 100644 index 00000000..51f35a94 --- /dev/null +++ b/test-utils/src/commonMain/kotlin/com/apollographql/cache/normalized/testing/CacheKey.kt @@ -0,0 +1,19 @@ +package com.apollographql.cache.normalized.testing + +import com.apollographql.cache.normalized.api.CacheKey + +fun CacheKey.fieldKey(fieldName: String): String { + return "${keyToString()}.$fieldName" +} + +fun CacheKey.append(vararg keys: String): CacheKey { + var cacheKey: CacheKey = this + for (key in keys) { + cacheKey = CacheKey("${cacheKey.key}.$key") + } + return cacheKey +} + +fun CacheKey.keyToString(): String { + return key +} diff --git a/tests/defer/build.gradle.kts b/tests/defer/build.gradle.kts index cc0c7030..abc60ab5 100644 --- a/tests/defer/build.gradle.kts +++ b/tests/defer/build.gradle.kts @@ -24,6 +24,7 @@ kotlin { implementation(libs.kotlin.test) implementation(libs.apollo.testing.support) implementation(libs.apollo.mockserver) + implementation("com.apollographql.cache:test-utils") } } } diff --git a/tests/defer/src/commonTest/kotlin/test/DeferNormalizedCacheTest.kt b/tests/defer/src/commonTest/kotlin/test/DeferNormalizedCacheTest.kt index 1923306e..a5bef974 100644 --- a/tests/defer/src/commonTest/kotlin/test/DeferNormalizedCacheTest.kt +++ b/tests/defer/src/commonTest/kotlin/test/DeferNormalizedCacheTest.kt @@ -15,11 +15,14 @@ import 
com.apollographql.apollo.testing.internal.runTest import com.apollographql.cache.normalized.ApolloStore import com.apollographql.cache.normalized.FetchPolicy import com.apollographql.cache.normalized.api.CacheHeaders +import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.apolloStore import com.apollographql.cache.normalized.fetchPolicy import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.optimisticUpdates import com.apollographql.cache.normalized.store +import com.apollographql.cache.normalized.testing.append +import com.apollographql.cache.normalized.testing.keyToString import com.apollographql.mockserver.MockServer import com.apollographql.mockserver.assertNoRequest import com.apollographql.mockserver.awaitRequest @@ -452,7 +455,8 @@ class DeferNormalizedCacheTest { val cacheExceptionResponse = actual.last() assertIs(networkExceptionResponse.exception) assertIs(cacheExceptionResponse.exception) - assertEquals("Object 'computers.0.screen' has no field named 'isColor'", cacheExceptionResponse.exception!!.message) + val key = CacheKey("computers").append("0", "screen").keyToString() + assertEquals("Object '$key' has no field named 'isColor'", cacheExceptionResponse.exception!!.message) } @Test @@ -539,7 +543,7 @@ class DeferNormalizedCacheTest { val multipartBody = mockServer.enqueueMultipart("application/json") multipartBody.enqueuePart(jsonList[0].encodeUtf8(), false) val recordFields = apolloClient.query(SimpleDeferQuery()).fetchPolicy(FetchPolicy.NetworkOnly).toFlow().map { - apolloClient.apolloStore.accessCache { it.loadRecord("computers.0", CacheHeaders.NONE)!!.fields }.also { + apolloClient.apolloStore.accessCache { it.loadRecord(CacheKey("computers").append("0"), CacheHeaders.NONE)!!.fields }.also { multipartBody.enqueuePart(jsonList[1].encodeUtf8(), true) } }.toList() diff --git a/tests/garbage-collection/build.gradle.kts 
b/tests/garbage-collection/build.gradle.kts index 4e7d06ac..a70bbd27 100644 --- a/tests/garbage-collection/build.gradle.kts +++ b/tests/garbage-collection/build.gradle.kts @@ -24,6 +24,7 @@ kotlin { implementation(libs.apollo.testing.support) implementation(libs.apollo.mockserver) implementation(libs.kotlin.test) + implementation("com.apollographql.cache:test-utils") } } } diff --git a/tests/garbage-collection/src/commonTest/kotlin/DanglingReferencesTest.kt b/tests/garbage-collection/src/commonTest/kotlin/DanglingReferencesTest.kt index 569d3bfe..1ec8329b 100644 --- a/tests/garbage-collection/src/commonTest/kotlin/DanglingReferencesTest.kt +++ b/tests/garbage-collection/src/commonTest/kotlin/DanglingReferencesTest.kt @@ -11,8 +11,11 @@ import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.removeDanglingReferences import com.apollographql.cache.normalized.sql.SqlNormalizedCacheFactory import com.apollographql.cache.normalized.store +import com.apollographql.cache.normalized.testing.append +import com.apollographql.cache.normalized.testing.fieldKey import com.apollographql.mockserver.MockServer import com.apollographql.mockserver.enqueueString +import kotlinx.coroutines.test.TestResult import okio.use import kotlin.test.Test import kotlin.test.assertEquals @@ -21,9 +24,19 @@ import kotlin.test.assertTrue class DanglingReferencesTest { @Test - fun simple() = runTest { + fun simpleMemory() = simple(ApolloStore(MemoryCacheFactory())) + + @Test + fun simpleSql() = simple(ApolloStore(SqlNormalizedCacheFactory())) + + @Test + fun simpleChained(): TestResult { + return simple(ApolloStore(MemoryCacheFactory().chain(SqlNormalizedCacheFactory()))) + } + + private fun simple(apolloStore: ApolloStore) = runTest { val mockServer = MockServer() - val store = ApolloStore(MemoryCacheFactory().chain(SqlNormalizedCacheFactory())).also { it.clearAll() } + val store = apolloStore.also { it.clearAll() } ApolloClient.Builder() 
.serverUrl(mockServer.url()) .store(store) @@ -35,13 +48,13 @@ class DanglingReferencesTest { .execute() var allRecords = store.accessCache { it.allRecords() } - assertTrue(allRecords["Repository:0"]!!.fields.containsKey("starGazers")) + assertTrue(allRecords[CacheKey("Repository:0")]!!.fields.containsKey("starGazers")) // Remove User 1, now Repository 0.starGazers is a dangling reference store.remove(CacheKey("User:1"), cascade = false) val removedFieldsAndRecords = store.removeDanglingReferences() assertEquals( - setOf("Repository:0.starGazers"), + setOf(CacheKey("Repository:0").fieldKey("starGazers")), removedFieldsAndRecords.removedFields ) assertEquals( @@ -49,14 +62,22 @@ class DanglingReferencesTest { removedFieldsAndRecords.removedRecords ) allRecords = store.accessCache { it.allRecords() } - assertFalse(allRecords["Repository:0"]!!.fields.containsKey("starGazers")) + assertFalse(allRecords[CacheKey("Repository:0")]!!.fields.containsKey("starGazers")) } } @Test - fun multiple() = runTest { + fun multipleMemory() = multiple(ApolloStore(MemoryCacheFactory())) + + @Test + fun multipleSql() = multiple(ApolloStore(SqlNormalizedCacheFactory())) + + @Test + fun multipleChained() = multiple(ApolloStore(MemoryCacheFactory().chain(SqlNormalizedCacheFactory()))) + + private fun multiple(apolloStore: ApolloStore) = runTest { val mockServer = MockServer() - val store = ApolloStore(MemoryCacheFactory().chain(SqlNormalizedCacheFactory())).also { it.clearAll() } + val store = apolloStore.also { it.clearAll() } ApolloClient.Builder() .serverUrl(mockServer.url()) .store(store) @@ -78,24 +99,24 @@ class DanglingReferencesTest { val removedFieldsAndRecords = store.removeDanglingReferences() assertEquals( setOf( - "metaProjects.0.0.type.owners", - "metaProjects.0.0.type", - "QUERY_ROOT.metaProjects", + CacheKey("metaProjects").append("0", "0", "type").fieldKey("owners"), + CacheKey("metaProjects").append("0", "0").fieldKey("type"), + 
CacheKey("QUERY_ROOT").fieldKey("metaProjects"), ), removedFieldsAndRecords.removedFields ) assertEquals( setOf( - CacheKey("metaProjects.0.0.type"), - CacheKey("metaProjects.0.0"), + CacheKey("metaProjects").append("0", "0", "type"), + CacheKey("metaProjects").append("0", "0"), CacheKey("QUERY_ROOT"), ), removedFieldsAndRecords.removedRecords ) val allRecords = store.accessCache { it.allRecords() } - assertFalse(allRecords.containsKey("QUERY_ROOT")) - assertFalse(allRecords.containsKey("metaProjects.0.0")) - assertFalse(allRecords.containsKey("metaProjects.0.0.type")) + assertFalse(allRecords.containsKey(CacheKey("QUERY_ROOT"))) + assertFalse(allRecords.containsKey(CacheKey("metaProjects").append("0", "0"))) + assertFalse(allRecords.containsKey(CacheKey("metaProjects").append("0", "0", "type"))) } } diff --git a/tests/garbage-collection/src/commonTest/kotlin/GarbageCollectTest.kt b/tests/garbage-collection/src/commonTest/kotlin/GarbageCollectTest.kt index 49ed6e34..f7f10cc6 100644 --- a/tests/garbage-collection/src/commonTest/kotlin/GarbageCollectTest.kt +++ b/tests/garbage-collection/src/commonTest/kotlin/GarbageCollectTest.kt @@ -13,6 +13,8 @@ import com.apollographql.cache.normalized.garbageCollect import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.sql.SqlNormalizedCacheFactory import com.apollographql.cache.normalized.store +import com.apollographql.cache.normalized.testing.append +import com.apollographql.cache.normalized.testing.fieldKey import com.apollographql.mockserver.MockServer import com.apollographql.mockserver.enqueueString import okio.use @@ -23,9 +25,17 @@ import kotlin.time.Duration.Companion.seconds class GarbageCollectTest { @Test - fun garbageCollect() = runTest { + fun garbageCollectMemory() = garbageCollect(ApolloStore(MemoryCacheFactory())) + + @Test + fun garbageCollectSql() = garbageCollect(ApolloStore(SqlNormalizedCacheFactory())) + + @Test + fun garbageCollectChained() = 
garbageCollect(ApolloStore(MemoryCacheFactory().chain(SqlNormalizedCacheFactory()))) + + private fun garbageCollect(apolloStore: ApolloStore) = runTest { val mockServer = MockServer() - val store = ApolloStore(MemoryCacheFactory().chain(SqlNormalizedCacheFactory())).also { it.clearAll() } + val store = apolloStore.also { it.clearAll() } ApolloClient.Builder() .serverUrl(mockServer.url()) .store(store) @@ -51,35 +61,35 @@ class GarbageCollectTest { val garbageCollectResult = store.garbageCollect(maxAgeProvider) assertEquals( setOf( - "metaProjects.0.0.type.owners", - "metaProjects.0.1.type.owners", - "metaProjects.1.0.type.owners", + CacheKey("metaProjects").append("0", "0", "type").fieldKey("owners"), + CacheKey("metaProjects").append("0", "1", "type").fieldKey("owners"), + CacheKey("metaProjects").append("1", "0", "type").fieldKey("owners"), ), garbageCollectResult.removedStaleFields.removedFields ) assertEquals( setOf( - CacheKey("metaProjects.0.0.type"), - CacheKey("metaProjects.0.1.type"), - CacheKey("metaProjects.1.0.type"), + CacheKey("metaProjects").append("0", "0", "type"), + CacheKey("metaProjects").append("0", "1", "type"), + CacheKey("metaProjects").append("1", "0", "type"), ), garbageCollectResult.removedStaleFields.removedRecords ) assertEquals( setOf( - "metaProjects.0.0.type", - "metaProjects.0.1.type", - "metaProjects.1.0.type", - "QUERY_ROOT.metaProjects", + CacheKey("metaProjects").append("0", "0").fieldKey("type"), + CacheKey("metaProjects").append("0", "1").fieldKey("type"), + CacheKey("metaProjects").append("1", "0").fieldKey("type"), + CacheKey("QUERY_ROOT").fieldKey("metaProjects"), ), garbageCollectResult.removedDanglingReferences.removedFields ) assertEquals( setOf( - CacheKey("metaProjects.0.0"), - CacheKey("metaProjects.0.1"), - CacheKey("metaProjects.1.0"), + CacheKey("metaProjects").append("0", "0"), + CacheKey("metaProjects").append("0", "1"), + CacheKey("metaProjects").append("1", "0"), CacheKey("QUERY_ROOT"), ), 
garbageCollectResult.removedDanglingReferences.removedRecords diff --git a/tests/garbage-collection/src/commonTest/kotlin/ReachableCacheKeysTest.kt b/tests/garbage-collection/src/commonTest/kotlin/ReachableCacheKeysTest.kt index 6e1945f1..a06f62d6 100644 --- a/tests/garbage-collection/src/commonTest/kotlin/ReachableCacheKeysTest.kt +++ b/tests/garbage-collection/src/commonTest/kotlin/ReachableCacheKeysTest.kt @@ -19,14 +19,21 @@ import okio.use import test.fragment.RepositoryFragment import test.fragment.RepositoryFragmentImpl import kotlin.test.Test -import kotlin.test.assertContentEquals import kotlin.test.assertEquals class ReachableCacheKeysTest { @Test - fun getReachableCacheKeys() = runTest { + fun getReachableCacheKeysMemory() = getReachableCacheKeys(ApolloStore(MemoryCacheFactory())) + + @Test + fun getReachableCacheKeysSql() = getReachableCacheKeys(ApolloStore(SqlNormalizedCacheFactory())) + + @Test + fun getReachableCacheKeysChained() = getReachableCacheKeys(ApolloStore(MemoryCacheFactory().chain(SqlNormalizedCacheFactory()))) + + private fun getReachableCacheKeys(apolloStore: ApolloStore) = runTest { val mockServer = MockServer() - val store = ApolloStore(MemoryCacheFactory().chain(SqlNormalizedCacheFactory())).also { it.clearAll() } + val store = apolloStore.also { it.clearAll() } ApolloClient.Builder() .serverUrl(mockServer.url()) .store(store) @@ -127,8 +134,8 @@ class ReachableCacheKeysTest { apolloClient.query(query).fetchPolicy(FetchPolicy.NetworkOnly).execute() var reachableCacheKeys = store.accessCache { it.allRecords().getReachableCacheKeys() } - assertContentEquals( - listOf( + assertEquals( + setOf( CacheKey("QUERY_ROOT"), CacheKey("Repository:8"), CacheKey("Repository:7"), @@ -148,8 +155,8 @@ class ReachableCacheKeysTest { // Remove User 43, now Repositories 5 and 6 should not be reachable / 7 should still be reachable store.remove(CacheKey("User:43"), cascade = false) reachableCacheKeys = store.accessCache { 
it.allRecords().getReachableCacheKeys() } - assertContentEquals( - listOf( + assertEquals( + setOf( CacheKey("QUERY_ROOT"), CacheKey("Repository:8"), CacheKey("Repository:7"), @@ -170,8 +177,8 @@ class ReachableCacheKeysTest { RepositoryFragment(id = "500", __typename = "Repository", starGazers = emptyList()), ) reachableCacheKeys = store.accessCache { it.allRecords().getReachableCacheKeys() } - assertContentEquals( - listOf( + assertEquals( + setOf( CacheKey("QUERY_ROOT"), CacheKey("Repository:8"), CacheKey("Repository:7"), @@ -199,7 +206,7 @@ class ReachableCacheKeysTest { CacheKey("Repository:500"), CacheKey("Repository:7"), ), - store.accessCache { it.allRecords() }.keys.map { CacheKey(it) }.toSet() + store.accessCache { it.allRecords() }.keys.toSet() ) // Remove unreachable records, should remove Repositories 5, 6, and 500 @@ -216,7 +223,7 @@ class ReachableCacheKeysTest { CacheKey("Repository:2"), CacheKey("Repository:1"), ), - store.accessCache { it.allRecords() }.keys.map { CacheKey(it) }.toSet() + store.accessCache { it.allRecords() }.keys.toSet() ) assertEquals( setOf( diff --git a/tests/garbage-collection/src/commonTest/kotlin/StaleFieldsTest.kt b/tests/garbage-collection/src/commonTest/kotlin/StaleFieldsTest.kt index a74e8054..7333848c 100644 --- a/tests/garbage-collection/src/commonTest/kotlin/StaleFieldsTest.kt +++ b/tests/garbage-collection/src/commonTest/kotlin/StaleFieldsTest.kt @@ -17,6 +17,8 @@ import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.removeStaleFields import com.apollographql.cache.normalized.sql.SqlNormalizedCacheFactory import com.apollographql.cache.normalized.store +import com.apollographql.cache.normalized.testing.append +import com.apollographql.cache.normalized.testing.fieldKey import com.apollographql.mockserver.MockServer import com.apollographql.mockserver.enqueueString import okio.use @@ -30,9 +32,18 @@ import kotlin.time.Duration.Companion.seconds class 
StaleFieldsTest { @Test - fun clientControlledRemoveFields() = runTest { + fun clientControlledRemoveFieldsMemory() = clientControlledRemoveFields(ApolloStore(MemoryCacheFactory())) + + @Test + fun clientControlledRemoveFieldsSql() = clientControlledRemoveFields(ApolloStore(SqlNormalizedCacheFactory())) + + @Test + fun clientControlledRemoveFieldsChained() = + clientControlledRemoveFields(ApolloStore(MemoryCacheFactory().chain(SqlNormalizedCacheFactory()))) + + private fun clientControlledRemoveFields(apolloStore: ApolloStore) = runTest { val mockServer = MockServer() - val store = ApolloStore(MemoryCacheFactory().chain(SqlNormalizedCacheFactory())).also { it.clearAll() } + val store = apolloStore.also { it.clearAll() } ApolloClient.Builder() .serverUrl(mockServer.url()) .store(store) @@ -45,10 +56,10 @@ class StaleFieldsTest { .execute() var allRecords = store.accessCache { it.allRecords() } - assertTrue(allRecords["Repository:0"]!!.fields.containsKey("stars")) - assertTrue(allRecords["Repository:0"]!!.fields.containsKey("starGazers")) - assertTrue(allRecords["Repository:1"]!!.fields.containsKey("stars")) - assertTrue(allRecords["Repository:1"]!!.fields.containsKey("starGazers")) + assertTrue(allRecords[CacheKey("Repository:0")]!!.fields.containsKey("stars")) + assertTrue(allRecords[CacheKey("Repository:0")]!!.fields.containsKey("starGazers")) + assertTrue(allRecords[CacheKey("Repository:1")]!!.fields.containsKey("stars")) + assertTrue(allRecords[CacheKey("Repository:1")]!!.fields.containsKey("starGazers")) val maxAgeProvider = SchemaCoordinatesMaxAgeProvider( Cache.maxAges, @@ -58,18 +69,18 @@ class StaleFieldsTest { // Repository.stars has a max age of 60 seconds, so they should be removed / User has a max age of 90 seconds, so Repository.starGazers should be kept assertEquals( setOf( - "Repository:0.stars", - "Repository:1.stars", + CacheKey("Repository:0").fieldKey("stars"), + CacheKey("Repository:1").fieldKey("stars"), ), removedFieldsAndRecords.removedFields 
) assertEquals( emptySet(), removedFieldsAndRecords.removedRecords ) allRecords = store.accessCache { it.allRecords() } - assertFalse(allRecords["Repository:0"]!!.fields.containsKey("stars")) - assertTrue(allRecords["Repository:0"]!!.fields.containsKey("starGazers")) - assertFalse(allRecords["Repository:1"]!!.fields.containsKey("stars")) - assertTrue(allRecords["Repository:1"]!!.fields.containsKey("starGazers")) + assertFalse(allRecords[CacheKey("Repository:0")]!!.fields.containsKey("stars")) + assertTrue(allRecords[CacheKey("Repository:0")]!!.fields.containsKey("starGazers")) + assertFalse(allRecords[CacheKey("Repository:1")]!!.fields.containsKey("stars")) + assertTrue(allRecords[CacheKey("Repository:1")]!!.fields.containsKey("starGazers")) mockServer.enqueueString(REPOSITORY_LIST_RESPONSE) apolloClient.query(RepositoryListQuery()) @@ -80,27 +91,36 @@ class StaleFieldsTest { // Repository.stars and Repository.starGazers should be removed assertEquals( setOf( - "Repository:0.stars", - "Repository:0.starGazers", - "Repository:1.stars", - "Repository:1.starGazers", + CacheKey("Repository:0").fieldKey("stars"), + CacheKey("Repository:0").fieldKey("starGazers"), + CacheKey("Repository:1").fieldKey("stars"), + CacheKey("Repository:1").fieldKey("starGazers"), ), removedFieldsAndRecords.removedFields ) assertEquals( emptySet(), removedFieldsAndRecords.removedRecords ) allRecords = store.accessCache { it.allRecords() } - assertFalse(allRecords["Repository:0"]!!.fields.containsKey("stars")) - assertFalse(allRecords["Repository:0"]!!.fields.containsKey("starGazers")) - assertFalse(allRecords["Repository:1"]!!.fields.containsKey("stars")) - assertFalse(allRecords["Repository:1"]!!.fields.containsKey("starGazers")) + assertFalse(allRecords[CacheKey("Repository:0")]!!.fields.containsKey("stars")) + assertFalse(allRecords[CacheKey("Repository:0")]!!.fields.containsKey("starGazers")) + assertFalse(allRecords[CacheKey("Repository:1")]!!.fields.containsKey("stars")) + 
assertFalse(allRecords[CacheKey("Repository:1")]!!.fields.containsKey("starGazers")) } } @Test - fun clientControlledRemoveRecords() = runTest { + fun clientControlledRemoveRecordsMemory() = clientControlledRemoveRecords(ApolloStore(MemoryCacheFactory())) + + @Test + fun clientControlledRemoveRecordsSql() = clientControlledRemoveRecords(ApolloStore(SqlNormalizedCacheFactory())) + + @Test + fun clientControlledRemoveRecordsChained() = + clientControlledRemoveRecords(ApolloStore(MemoryCacheFactory().chain(SqlNormalizedCacheFactory()))) + + private fun clientControlledRemoveRecords(apolloStore: ApolloStore) = runTest { val mockServer = MockServer() - val store = ApolloStore(MemoryCacheFactory().chain(SqlNormalizedCacheFactory())).also { it.clearAll() } + val store = apolloStore.also { it.clearAll() } ApolloClient.Builder() .serverUrl(mockServer.url()) .store(store) @@ -113,10 +133,10 @@ class StaleFieldsTest { .execute() var allRecords = store.accessCache { it.allRecords() } - assertTrue(allRecords["projects.0"]!!.fields.containsKey("velocity")) - assertTrue(allRecords["projects.0"]!!.fields.containsKey("isUrgent")) - assertTrue(allRecords["projects.1"]!!.fields.containsKey("velocity")) - assertTrue(allRecords["projects.1"]!!.fields.containsKey("isUrgent")) + assertTrue(allRecords[CacheKey("projects").append("0")]!!.fields.containsKey("velocity")) + assertTrue(allRecords[CacheKey("projects").append("0")]!!.fields.containsKey("isUrgent")) + assertTrue(allRecords[CacheKey("projects").append("1")]!!.fields.containsKey("velocity")) + assertTrue(allRecords[CacheKey("projects").append("1")]!!.fields.containsKey("isUrgent")) val maxAgeProvider = SchemaCoordinatesMaxAgeProvider( Cache.maxAges, @@ -126,18 +146,18 @@ class StaleFieldsTest { // Project.velocity has a max age of 60 seconds, so they should be removed / Project.isUrgent has a max age of 90 seconds, so they should be kept assertEquals( setOf( - "projects.0.velocity", - "projects.1.velocity", + 
CacheKey("projects").append("0").fieldKey("velocity"), + CacheKey("projects").append("1").fieldKey("velocity"), ), removedFieldsAndRecords.removedFields ) assertEquals( emptySet(), removedFieldsAndRecords.removedRecords ) allRecords = store.accessCache { it.allRecords() } - assertFalse(allRecords["projects.0"]!!.fields.containsKey("velocity")) - assertTrue(allRecords["projects.0"]!!.fields.containsKey("isUrgent")) - assertFalse(allRecords["projects.1"]!!.fields.containsKey("velocity")) - assertTrue(allRecords["projects.1"]!!.fields.containsKey("isUrgent")) + assertFalse(allRecords[CacheKey("projects").append("0")]!!.fields.containsKey("velocity")) + assertTrue(allRecords[CacheKey("projects").append("0")]!!.fields.containsKey("isUrgent")) + assertFalse(allRecords[CacheKey("projects").append("1")]!!.fields.containsKey("velocity")) + assertTrue(allRecords[CacheKey("projects").append("1")]!!.fields.containsKey("isUrgent")) mockServer.enqueueString(PROJECT_LIST_RESPONSE) apolloClient.query(ProjectListQuery()) @@ -148,28 +168,37 @@ class StaleFieldsTest { // Project.velocity and Project.isUrgent should be removed, their records being empty they should be removed assertEquals( setOf( - "projects.0.velocity", - "projects.0.isUrgent", - "projects.1.velocity", - "projects.1.isUrgent", + CacheKey("projects").append("0").fieldKey("velocity"), + CacheKey("projects").append("0").fieldKey("isUrgent"), + CacheKey("projects").append("1").fieldKey("velocity"), + CacheKey("projects").append("1").fieldKey("isUrgent"), ), removedFieldsAndRecords.removedFields ) assertEquals( setOf( - CacheKey("projects.0"), - CacheKey("projects.1"), + CacheKey("projects").append("0"), + CacheKey("projects").append("1"), ), removedFieldsAndRecords.removedRecords ) allRecords = store.accessCache { it.allRecords() } - assertFalse(allRecords.containsKey("projects.0")) - assertFalse(allRecords.containsKey("projects.1")) + assertFalse(allRecords.containsKey(CacheKey("projects").append("0"))) + 
assertFalse(allRecords.containsKey(CacheKey("projects").append("1"))) } } @Test - fun serverControlledRemoveFields() = runTest { + fun serverControlledRemoveFieldsMemory() = serverControlledRemoveFields(ApolloStore(MemoryCacheFactory())) + + @Test + fun serverControlledRemoveFieldsSql() = serverControlledRemoveFields(ApolloStore(SqlNormalizedCacheFactory())) + + @Test + fun serverControlledRemoveFieldsChained() = + serverControlledRemoveFields(ApolloStore(MemoryCacheFactory().chain(SqlNormalizedCacheFactory()))) + + private fun serverControlledRemoveFields(apolloStore: ApolloStore) = runTest { val mockServer = MockServer() - val store = ApolloStore(MemoryCacheFactory().chain(SqlNormalizedCacheFactory())).also { it.clearAll() } + val store = apolloStore.also { it.clearAll() } ApolloClient.Builder() .serverUrl(mockServer.url()) .store(store) @@ -182,33 +211,33 @@ class StaleFieldsTest { .execute() var allRecords = store.accessCache { it.allRecords() } - assertTrue(allRecords["Repository:0"]!!.fields.containsKey("stars")) - assertTrue(allRecords["Repository:0"]!!.fields.containsKey("starGazers")) - assertTrue(allRecords["Repository:1"]!!.fields.containsKey("stars")) - assertTrue(allRecords["Repository:1"]!!.fields.containsKey("starGazers")) + assertTrue(allRecords[CacheKey("Repository:0")]!!.fields.containsKey("stars")) + assertTrue(allRecords[CacheKey("Repository:0")]!!.fields.containsKey("starGazers")) + assertTrue(allRecords[CacheKey("Repository:1")]!!.fields.containsKey("stars")) + assertTrue(allRecords[CacheKey("Repository:1")]!!.fields.containsKey("starGazers")) var removedFieldsAndRecords = store.removeStaleFields(GlobalMaxAgeProvider(Duration.INFINITE)) // Everything is stale assertEquals( setOf( - "Repository:0.__typename", - "Repository:0.id", - "Repository:0.stars", - "Repository:0.starGazers", - "User:0.__typename", - "User:0.id", - "User:0.name", - "Repository:1.__typename", - "Repository:1.id", - "Repository:1.stars", - "Repository:1.starGazers", - 
"User:2.__typename", - "User:2.id", - "User:2.name", - "QUERY_ROOT.repositories({\"first\":15})", - "User:1.__typename", - "User:1.id", - "User:1.name" + CacheKey("Repository:0").fieldKey("__typename"), + CacheKey("Repository:0").fieldKey("id"), + CacheKey("Repository:0").fieldKey("stars"), + CacheKey("Repository:0").fieldKey("starGazers"), + CacheKey("User:0").fieldKey("__typename"), + CacheKey("User:0").fieldKey("id"), + CacheKey("User:0").fieldKey("name"), + CacheKey("Repository:1").fieldKey("__typename"), + CacheKey("Repository:1").fieldKey("id"), + CacheKey("Repository:1").fieldKey("stars"), + CacheKey("Repository:1").fieldKey("starGazers"), + CacheKey("User:2").fieldKey("__typename"), + CacheKey("User:2").fieldKey("id"), + CacheKey("User:2").fieldKey("name"), + CacheKey("QUERY_ROOT").fieldKey("repositories({\"first\":15})"), + CacheKey("User:1").fieldKey("__typename"), + CacheKey("User:1").fieldKey("id"), + CacheKey("User:1").fieldKey("name"), ), removedFieldsAndRecords.removedFields ) assertEquals( @@ -241,10 +270,10 @@ class StaleFieldsTest { removedFieldsAndRecords.removedRecords ) allRecords = store.accessCache { it.allRecords() } - assertTrue(allRecords["Repository:0"]!!.fields.containsKey("stars")) - assertTrue(allRecords["Repository:0"]!!.fields.containsKey("starGazers")) - assertTrue(allRecords["Repository:1"]!!.fields.containsKey("stars")) - assertTrue(allRecords["Repository:1"]!!.fields.containsKey("starGazers")) + assertTrue(allRecords[CacheKey("Repository:0")]!!.fields.containsKey("stars")) + assertTrue(allRecords[CacheKey("Repository:0")]!!.fields.containsKey("starGazers")) + assertTrue(allRecords[CacheKey("Repository:1")]!!.fields.containsKey("stars")) + assertTrue(allRecords[CacheKey("Repository:1")]!!.fields.containsKey("starGazers")) } } diff --git a/tests/include-skip-operation-based/src/commonTest/kotlin/IncludeTest.kt b/tests/include-skip-operation-based/src/commonTest/kotlin/IncludeTest.kt index fb5767aa..d932a362 100644 --- 
a/tests/include-skip-operation-based/src/commonTest/kotlin/IncludeTest.kt +++ b/tests/include-skip-operation-based/src/commonTest/kotlin/IncludeTest.kt @@ -5,6 +5,7 @@ import com.apollographql.apollo.api.Optional import com.apollographql.apollo.api.json.MapJsonReader import com.apollographql.apollo.api.toApolloResponse import com.apollographql.apollo.testing.internal.runTest +import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.internal.normalized import com.example.GetCatIncludeVariableWithDefaultQuery import com.example.SkipFragmentWithDefaultToFalseQuery @@ -29,7 +30,7 @@ class IncludeTest { } val normalized = data.normalized(operation) - assertNull((normalized["animal"] as Map<*, *>)["species"]) + assertNull((normalized[CacheKey("animal")] as Map<*, *>)["species"]) } @Test @@ -43,6 +44,6 @@ class IncludeTest { } val normalized = data.normalized(operation) - assertNull((normalized["animal"] as Map<*, *>)["barf"]) + assertNull((normalized[CacheKey("animal")] as Map<*, *>)["barf"]) } } diff --git a/tests/migration/src/commonTest/kotlin/MigrationTest.kt b/tests/migration/src/commonTest/kotlin/MigrationTest.kt index eee75de2..521b3c6a 100644 --- a/tests/migration/src/commonTest/kotlin/MigrationTest.kt +++ b/tests/migration/src/commonTest/kotlin/MigrationTest.kt @@ -169,7 +169,7 @@ private fun LegacyNormalizedCache.allRecords(): List { } private fun LegacyRecord.toRecord(): Record = Record( - key = key, + key = CacheKey(key), fields = fields.mapValues { (_, value) -> value.toRecordValue() }, mutationId = mutationId ) diff --git a/tests/normalized-cache/build.gradle.kts b/tests/normalized-cache/build.gradle.kts index 9f3bbebf..babe8e4c 100644 --- a/tests/normalized-cache/build.gradle.kts +++ b/tests/normalized-cache/build.gradle.kts @@ -31,6 +31,7 @@ kotlin { implementation(libs.apollo.mockserver) implementation(libs.kotlin.test) implementation(libs.turbine) + implementation("com.apollographql.cache:test-utils") } } diff 
--git a/tests/normalized-cache/src/commonTest/kotlin/CacheFlagsTest.kt b/tests/normalized-cache/src/commonTest/kotlin/CacheFlagsTest.kt index 723d4445..43c6f44d 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/CacheFlagsTest.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/CacheFlagsTest.kt @@ -24,7 +24,6 @@ import kotlinx.coroutines.flow.Flow import kotlinx.coroutines.flow.map import normalizer.HeroNameQuery import kotlin.test.Test -import kotlin.test.assertEquals import kotlin.test.assertIs import kotlin.test.assertNotNull @@ -51,29 +50,6 @@ class CacheFlagsTest { ) } - @Test - fun testEvictAfterRead() = runTest(before = { setUp() }) { - val query = HeroNameQuery() - val data = HeroNameQuery.Data(HeroNameQuery.Hero("R2-D2")) - apolloClient.enqueueTestResponse(query, data) - - // Store the data - apolloClient.query(query).fetchPolicy(FetchPolicy.NetworkOnly).execute() - - // This should work and evict the entries - val response = apolloClient.query(query) - .fetchPolicy(FetchPolicy.CacheOnly) - .cacheHeaders(CacheHeaders.builder().addHeader(ApolloCacheHeaders.EVICT_AFTER_READ, "true").build()) - .execute() - - assertEquals("R2-D2", response.data?.hero?.name) - - // Second time should fail - assertIs( - apolloClient.query(query).fetchPolicy(FetchPolicy.CacheOnly).execute().exception - ) - } - private val partialResponseData = HeroNameQuery.Data(null) private val partialResponseErrors = listOf( Error.Builder(message = "An error Happened") diff --git a/tests/normalized-cache/src/commonTest/kotlin/FetchPolicyTest.kt b/tests/normalized-cache/src/commonTest/kotlin/FetchPolicyTest.kt index 86672212..ca6cd9b3 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/FetchPolicyTest.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/FetchPolicyTest.kt @@ -31,6 +31,7 @@ import com.apollographql.cache.normalized.isFromCache import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.refetchPolicyInterceptor 
import com.apollographql.cache.normalized.store +import com.apollographql.cache.normalized.testing.fieldKey import com.apollographql.cache.normalized.watch import com.apollographql.mockserver.MockServer import com.apollographql.mockserver.awaitRequest @@ -603,7 +604,7 @@ class FetchPolicyTest { ) } ) - store.publish(setOf("${CacheKey.rootKey().key}.hero")) + store.publish(setOf(CacheKey.rootKey().fieldKey("hero"))) /** * This time the watcher should do a network request diff --git a/tests/normalized-cache/src/commonTest/kotlin/MemoryCacheTest.kt b/tests/normalized-cache/src/commonTest/kotlin/MemoryCacheTest.kt index 6b80fae0..22f81e8b 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/MemoryCacheTest.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/MemoryCacheTest.kt @@ -2,6 +2,7 @@ package test import com.apollographql.apollo.testing.internal.runTest import com.apollographql.cache.normalized.api.CacheHeaders +import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.DefaultRecordMerger import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.memory.MemoryCache @@ -14,7 +15,7 @@ class MemoryCacheTest { @Test fun testDoesNotExpireBeforeMillis() = runTest { val record = Record( - key = "key", + key = CacheKey("key"), fields = mapOf( "field" to "value" ) diff --git a/tests/normalized-cache/src/commonTest/kotlin/NormalizerTest.kt b/tests/normalized-cache/src/commonTest/kotlin/NormalizerTest.kt index e75a07ed..2a08e52d 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/NormalizerTest.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/NormalizerTest.kt @@ -10,6 +10,7 @@ import com.apollographql.cache.normalized.api.NormalizedCache import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.internal.normalized import com.apollographql.cache.normalized.memory.MemoryCacheFactory +import com.apollographql.cache.normalized.testing.append 
import httpcache.AllPlanetsQuery import normalizer.EpisodeHeroNameQuery import normalizer.HeroAndFriendsNamesQuery @@ -33,8 +34,6 @@ import kotlin.test.assertTrue class NormalizerTest { private lateinit var normalizedCache: NormalizedCache - private val rootKey = "QUERY_ROOT" - @BeforeTest fun setUp() { normalizedCache = MemoryCacheFactory().create() @@ -44,10 +43,10 @@ class NormalizerTest { @Throws(Exception::class) fun testHeroName() { val records = records(HeroNameQuery(), "HeroNameResponse.json") - val record = records.get(rootKey) + val record = records.get(CacheKey.rootKey()) val reference = record!!["hero"] as CacheKey? assertEquals(reference, CacheKey("hero")) - val heroRecord = records.get(reference!!.key) + val heroRecord = records.get(reference!!) assertEquals(heroRecord!!["name"], "R2-D2") } @@ -55,30 +54,30 @@ class NormalizerTest { @Throws(Exception::class) fun testMergeNull() { val record = Record( - key = "Key", + key = CacheKey("Key"), fields = mapOf("field1" to "value1"), ) normalizedCache.merge(listOf(record), CacheHeaders.NONE, DefaultRecordMerger) val newRecord = Record( - key = "Key", + key = CacheKey("Key"), fields = mapOf("field2" to null), ) normalizedCache.merge(listOf(newRecord), CacheHeaders.NONE, DefaultRecordMerger) val finalRecord = normalizedCache.loadRecord(record.key, CacheHeaders.NONE) assertTrue(finalRecord!!.containsKey("field2")) - normalizedCache.remove(CacheKey(record.key), false) + normalizedCache.remove(record.key, false) } @Test @Throws(Exception::class) fun testHeroNameWithVariable() { val records = records(EpisodeHeroNameQuery(Episode.JEDI), "EpisodeHeroNameResponse.json") - val record = records.get(rootKey) + val record = records.get(CacheKey.rootKey()) val reference = record!![TEST_FIELD_KEY_JEDI] as CacheKey? assertEquals(reference, CacheKey(TEST_FIELD_KEY_JEDI)) - val heroRecord = records.get(reference!!.key) + val heroRecord = records.get(reference!!) 
assertEquals(heroRecord!!["name"], "R2-D2") } @@ -87,12 +86,12 @@ class NormalizerTest { fun testHeroAppearsInQuery() { val records = records(HeroAppearsInQuery(), "HeroAppearsInResponse.json") - val rootRecord = records.get(rootKey)!! + val rootRecord = records.get(CacheKey.rootKey())!! val heroReference = rootRecord["hero"] as CacheKey? assertEquals(heroReference, CacheKey("hero")) - val hero = records.get(heroReference!!.key) + val hero = records.get(heroReference!!) assertEquals(hero?.get("appearsIn"), listOf("NEWHOPE", "EMPIRE", "JEDI")) } @@ -100,20 +99,20 @@ class NormalizerTest { @Throws(Exception::class) fun testHeroAndFriendsNamesQueryWithoutIDs() { val records = records(HeroAndFriendsNamesQuery(Episode.JEDI), "HeroAndFriendsNameResponse.json") - val record = records.get(rootKey) + val record = records.get(CacheKey.rootKey()) val heroReference = record!![TEST_FIELD_KEY_JEDI] as CacheKey? assertEquals(heroReference, CacheKey(TEST_FIELD_KEY_JEDI)) - val heroRecord = records.get(heroReference!!.key) + val heroRecord = records.get(heroReference!!) 
assertEquals(heroRecord!!["name"], "R2-D2") assertEquals( listOf( - CacheKey("$TEST_FIELD_KEY_JEDI.friends.0"), - CacheKey("$TEST_FIELD_KEY_JEDI.friends.1"), - CacheKey("$TEST_FIELD_KEY_JEDI.friends.2") + CacheKey(TEST_FIELD_KEY_JEDI).append("friends", "0"), + CacheKey(TEST_FIELD_KEY_JEDI).append("friends", "1"), + CacheKey(TEST_FIELD_KEY_JEDI).append("friends", "2"), ), heroRecord["friends"] ) - val luke = records.get("$TEST_FIELD_KEY_JEDI.friends.0") + val luke = records.get(CacheKey(TEST_FIELD_KEY_JEDI).append("friends", "0")) assertEquals(luke!!["name"], "Luke Skywalker") } @@ -121,10 +120,10 @@ class NormalizerTest { @Throws(Exception::class) fun testHeroAndFriendsNamesQueryWithIDs() { val records = records(HeroAndFriendsNamesWithIDsQuery(Episode.JEDI), "HeroAndFriendsNameWithIdsResponse.json") - val record = records.get(rootKey) + val record = records.get(CacheKey.rootKey()) val heroReference = record!![TEST_FIELD_KEY_JEDI] as CacheKey? assertEquals(CacheKey("Character:2001"), heroReference) - val heroRecord = records.get(heroReference!!.key) + val heroRecord = records.get(heroReference!!) assertEquals(heroRecord!!["name"], "R2-D2") assertEquals( listOf( @@ -134,7 +133,7 @@ class NormalizerTest { ), heroRecord["friends"] ) - val luke = records.get("Character:1000") + val luke = records.get(CacheKey("Character:1000")) assertEquals(luke!!["name"], "Luke Skywalker") } @@ -142,20 +141,20 @@ class NormalizerTest { @Throws(Exception::class) fun testHeroAndFriendsNamesWithIDForParentOnly() { val records = records(HeroAndFriendsNamesWithIDForParentOnlyQuery(Episode.JEDI), "HeroAndFriendsNameWithIdsParentOnlyResponse.json") - val record = records[rootKey] + val record = records[CacheKey.rootKey()] val heroReference = record!![TEST_FIELD_KEY_JEDI] as CacheKey? assertEquals(CacheKey("Character:2001"), heroReference) - val heroRecord = records.get(heroReference!!.key) + val heroRecord = records.get(heroReference!!) 
assertEquals(heroRecord!!["name"], "R2-D2") assertEquals( listOf( - CacheKey("Character:2001.friends.0"), - CacheKey("Character:2001.friends.1"), - CacheKey("Character:2001.friends.2") + CacheKey("Character:2001").append("friends", "0"), + CacheKey("Character:2001").append("friends", "1"), + CacheKey("Character:2001").append("friends", "2") ), heroRecord["friends"] ) - val luke = records.get("Character:2001.friends.0") + val luke = records.get(CacheKey("Character:2001").append("friends", "0")) assertEquals(luke!!["name"], "Luke Skywalker") } @@ -163,9 +162,9 @@ class NormalizerTest { @Throws(Exception::class) fun testSameHeroTwiceQuery() { val records = records(SameHeroTwiceQuery(), "SameHeroTwiceResponse.json") - val record = records.get(rootKey) + val record = records.get(CacheKey.rootKey()) val heroReference = record!!["hero"] as CacheKey? - val hero = records.get(heroReference!!.key) + val hero = records.get(heroReference!!) assertEquals(hero!!["name"], "R2-D2") assertEquals(hero["appearsIn"], listOf("NEWHOPE", "EMPIRE", "JEDI")) @@ -175,9 +174,9 @@ class NormalizerTest { @Throws(Exception::class) fun testHeroTypeDependentAliasedFieldQueryDroid() { val records = records(HeroTypeDependentAliasedFieldQuery(Episode.JEDI), "HeroTypeDependentAliasedFieldResponse.json") - val record = records.get(rootKey) + val record = records.get(CacheKey.rootKey()) val heroReference = record!![TEST_FIELD_KEY_JEDI] as CacheKey? - val hero = records.get(heroReference!!.key) + val hero = records.get(heroReference!!) 
assertEquals(hero!!["primaryFunction"], "Astromech") assertEquals(hero["__typename"], "Droid") } @@ -186,9 +185,9 @@ class NormalizerTest { @Throws(Exception::class) fun testHeroTypeDependentAliasedFieldQueryHuman() { val records = records(HeroTypeDependentAliasedFieldQuery(Episode.EMPIRE), "HeroTypeDependentAliasedFieldResponseHuman.json") - val record = records.get(rootKey) + val record = records.get(CacheKey.rootKey()) val heroReference = record!![TEST_FIELD_KEY_EMPIRE] as CacheKey? - val hero = records.get(heroReference!!.key) + val hero = records.get(heroReference!!) assertEquals(hero!!["homePlanet"], "Tatooine") assertEquals(hero["__typename"], "Human") } @@ -197,9 +196,9 @@ class NormalizerTest { @Throws(Exception::class) fun testHeroParentTypeDependentAliasedFieldQueryHuman() { val records = records(HeroTypeDependentAliasedFieldQuery(Episode.EMPIRE), "HeroTypeDependentAliasedFieldResponseHuman.json") - val record = records.get(rootKey) + val record = records.get(CacheKey.rootKey()) val heroReference = record!![TEST_FIELD_KEY_EMPIRE] as CacheKey? - val hero = records.get(heroReference!!.key) + val hero = records.get(heroReference!!) 
assertEquals(hero!!["homePlanet"], "Tatooine") assertEquals(hero["__typename"], "Human") } @@ -208,29 +207,29 @@ class NormalizerTest { @Test @Throws(Exception::class) fun testHeroParentTypeDependentFieldDroid() { val records = records(HeroParentTypeDependentFieldQuery(Episode.JEDI), "HeroParentTypeDependentFieldDroidResponse.json") - val lukeRecord = records.get(TEST_FIELD_KEY_JEDI + ".friends.0") + val lukeRecord = records.get(CacheKey(TEST_FIELD_KEY_JEDI).append("friends", "0")) assertEquals(lukeRecord!!["name"], "Luke Skywalker") assertEquals(lukeRecord["height({\"unit\":\"METER\"})"], 1.72) - val friends = records[TEST_FIELD_KEY_JEDI]!!["friends"] + val friends = records[CacheKey(TEST_FIELD_KEY_JEDI)]!!["friends"] assertIs<List<Any>>(friends) - assertEquals(friends[0], CacheKey("$TEST_FIELD_KEY_JEDI.friends.0")) - assertEquals(friends[1], CacheKey("$TEST_FIELD_KEY_JEDI.friends.1")) - assertEquals(friends[2], CacheKey("$TEST_FIELD_KEY_JEDI.friends.2")) + assertEquals(friends[0], CacheKey(TEST_FIELD_KEY_JEDI).append("friends", "0")) + assertEquals(friends[1], CacheKey(TEST_FIELD_KEY_JEDI).append("friends", "1")) + assertEquals(friends[2], CacheKey(TEST_FIELD_KEY_JEDI).append("friends", "2")) } @Test fun list_of_objects_with_null_object() { val records = records(AllPlanetsQuery(), "AllPlanetsListOfObjectWithNullObject.json") - val fieldKey = "allPlanets({\"first\":300})" + val fieldKey = CacheKey("allPlanets({\"first\":300})") - var record: Record? = records["$fieldKey.planets.0"] + var record: Record? 
= records[fieldKey.append("planets", "0")] assertTrue(record?.get("filmConnection") == null) - record = records.get("$fieldKey.planets.0.filmConnection") + record = records.get(fieldKey.append("planets", "0", "filmConnection")) assertTrue(record == null) - record = records.get("$fieldKey.planets.1.filmConnection") + record = records.get(fieldKey.append("planets", "1", "filmConnection")) assertTrue(record != null) } @@ -240,13 +239,13 @@ class NormalizerTest { fun testHeroParentTypeDependentFieldHuman() { val records = records(HeroParentTypeDependentFieldQuery(Episode.EMPIRE), "HeroParentTypeDependentFieldHumanResponse.json") - val lukeRecord = records.get("$TEST_FIELD_KEY_EMPIRE.friends.0") + val lukeRecord = records.get(CacheKey(TEST_FIELD_KEY_EMPIRE).append("friends", "0")) assertEquals(lukeRecord!!["name"], "Han Solo") assertEquals(lukeRecord["height({\"unit\":\"FOOT\"})"], 5.905512) } companion object { - internal fun records(operation: Operation, name: String): Map { + internal fun records(operation: Operation, name: String): Map { val response = testFixtureToJsonReader(name).toApolloResponse(operation) return response.data!!.normalized(operation, cacheKeyGenerator = IdCacheKeyGenerator()) } diff --git a/tests/normalized-cache/src/commonTest/kotlin/OtherCacheTest.kt b/tests/normalized-cache/src/commonTest/kotlin/OtherCacheTest.kt index bbc92fc2..cad1d811 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/OtherCacheTest.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/OtherCacheTest.kt @@ -6,11 +6,13 @@ import com.apollographql.apollo.exception.CacheMissException import com.apollographql.apollo.testing.internal.runTest import com.apollographql.cache.normalized.ApolloStore import com.apollographql.cache.normalized.FetchPolicy +import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.IdCacheKeyGenerator import com.apollographql.cache.normalized.api.IdCacheKeyResolver import 
com.apollographql.cache.normalized.fetchPolicy import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.store +import com.apollographql.cache.normalized.testing.keyToString import com.apollographql.mockserver.MockServer import com.apollographql.mockserver.enqueueString import normalizer.CharacterDetailsQuery @@ -72,7 +74,7 @@ class OtherCacheTest { // Some details are not present in the master query, we should get a cache miss val e = apolloClient.query(CharacterDetailsQuery("1002")).fetchPolicy(FetchPolicy.CacheOnly).execute().exception as CacheMissException - assertTrue(e.message!!.contains("Object 'Character:1002' has no field named '__typename'")) + assertTrue(e.message!!.contains("Object '${CacheKey("Character:1002").keyToString()}' has no field named '__typename'")) } @@ -82,7 +84,7 @@ class OtherCacheTest { .fetchPolicy(FetchPolicy.CacheOnly) .execute() .exception!! - assertTrue(e.message!!.contains("Object 'QUERY_ROOT' has no field named 'hero")) + assertTrue(e.message!!.contains("Object '${CacheKey("QUERY_ROOT").keyToString()}' has no field named 'hero")) } @Test diff --git a/tests/normalized-cache/src/commonTest/kotlin/StoreTest.kt b/tests/normalized-cache/src/commonTest/kotlin/StoreTest.kt index 5ac9b9d9..3c36c9bd 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/StoreTest.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/StoreTest.kt @@ -112,11 +112,9 @@ class StoreTest { storeAllFriends() store.accessCache { - it.remove("Character:10%") + it.remove(CacheKey("Character:1000"), false) } assertFriendIsNotCached("1000") - assertFriendIsNotCached("1002") - assertFriendIsNotCached("1003") } @Test diff --git a/tests/normalized-cache/src/commonTest/kotlin/ThreadTests.kt b/tests/normalized-cache/src/commonTest/kotlin/ThreadTests.kt index e424371f..29538744 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/ThreadTests.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/ThreadTests.kt @@ 
-62,28 +62,21 @@ class ThreadTests { return delegate.remove(cacheKeys, cascade) } - override fun remove(pattern: String): Int { - check(currentThreadId() != mainThreadId) { - "Cache access on main thread" - } - return delegate.remove(pattern) - } - - override fun loadRecord(key: String, cacheHeaders: CacheHeaders): Record? { + override fun loadRecord(key: CacheKey, cacheHeaders: CacheHeaders): Record? { check(currentThreadId() != mainThreadId) { "Cache access on main thread" } return delegate.loadRecord(key, cacheHeaders) } - override fun loadRecords(keys: Collection, cacheHeaders: CacheHeaders): Collection { + override fun loadRecords(keys: Collection, cacheHeaders: CacheHeaders): Collection { check(currentThreadId() != mainThreadId) { "Cache access on main thread" } return delegate.loadRecords(keys, cacheHeaders) } - override fun dump(): Map, Map> { + override fun dump(): Map, Map> { check(currentThreadId() != mainThreadId) { "Cache access on main thread" } diff --git a/tests/normalized-cache/src/commonTest/kotlin/fragmentnormalizer/FragmentNormalizerTest.kt b/tests/normalized-cache/src/commonTest/kotlin/fragmentnormalizer/FragmentNormalizerTest.kt index 3f2729f3..4805f18f 100644 --- a/tests/normalized-cache/src/commonTest/kotlin/fragmentnormalizer/FragmentNormalizerTest.kt +++ b/tests/normalized-cache/src/commonTest/kotlin/fragmentnormalizer/FragmentNormalizerTest.kt @@ -9,6 +9,7 @@ import com.apollographql.cache.normalized.apolloStore import com.apollographql.cache.normalized.internal.normalized import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.normalizedCache +import com.apollographql.cache.normalized.testing.append import fragmentnormalizer.fragment.ConversationFragment import fragmentnormalizer.fragment.ConversationFragmentImpl import kotlin.test.Test @@ -95,10 +96,10 @@ class FragmentNormalizerTest { val records = fragment.normalized( ConversationFragmentImpl(), - rootKey = "1", + rootKey = 
CacheKey("1"), cacheKeyGenerator = IdCacheKeyGenerator(), ) - assertContains(records.keys, "1.author") + assertContains(records.keys, CacheKey("1").append("author")) } } diff --git a/tests/normalized-cache/src/concurrentTest/kotlin/MemoryCacheOnlyTest.kt b/tests/normalized-cache/src/concurrentTest/kotlin/MemoryCacheOnlyTest.kt index 481bab89..76c03ef6 100644 --- a/tests/normalized-cache/src/concurrentTest/kotlin/MemoryCacheOnlyTest.kt +++ b/tests/normalized-cache/src/concurrentTest/kotlin/MemoryCacheOnlyTest.kt @@ -5,6 +5,7 @@ import com.apollographql.apollo.testing.enqueueTestResponse import com.apollographql.apollo.testing.internal.runTest import com.apollographql.cache.normalized.ApolloStore import com.apollographql.cache.normalized.FetchPolicy +import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.fetchPolicy import com.apollographql.cache.normalized.memory.MemoryCache @@ -27,7 +28,7 @@ class MemoryCacheOnlyTest { val query = GetUserQuery() apolloClient.enqueueTestResponse(query, GetUserQuery.Data(GetUserQuery.User("John", "a@a.com"))) apolloClient.query(query).memoryCacheOnly(true).execute() - val dump: Map, Map> = store.dump() + val dump: Map, Map> = store.dump() assertEquals(2, dump[MemoryCache::class]!!.size) assertEquals(0, dump[SqlNormalizedCache::class]!!.size) } diff --git a/tests/normalized-cache/src/jvmTest/kotlin/CacheMissLoggingInterceptorTest.kt b/tests/normalized-cache/src/jvmTest/kotlin/CacheMissLoggingInterceptorTest.kt index 403db40c..f80e4831 100644 --- a/tests/normalized-cache/src/jvmTest/kotlin/CacheMissLoggingInterceptorTest.kt +++ b/tests/normalized-cache/src/jvmTest/kotlin/CacheMissLoggingInterceptorTest.kt @@ -3,10 +3,12 @@ package test import com.apollographql.apollo.ApolloClient import com.apollographql.apollo.testing.internal.runTest import com.apollographql.cache.normalized.FetchPolicy +import 
com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.fetchPolicy import com.apollographql.cache.normalized.logCacheMisses import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.normalizedCache +import com.apollographql.cache.normalized.testing.keyToString import com.apollographql.mockserver.MockServer import com.apollographql.mockserver.enqueueString import normalizer.HeroAppearsInQuery @@ -53,8 +55,8 @@ class CacheMissLoggingInterceptorTest { assertEquals( listOf( - "Object 'QUERY_ROOT' has no field named 'hero'", - "Object 'hero' has no field named 'appearsIn'" + "Object '${CacheKey("QUERY_ROOT").keyToString()}' has no field named 'hero'", + "Object '${CacheKey("hero").keyToString()}' has no field named 'appearsIn'" ), recordedLogs ) diff --git a/tests/normalized-cache/src/jvmTest/kotlin/WriteToCacheAsynchronouslyTest.kt b/tests/normalized-cache/src/jvmTest/kotlin/WriteToCacheAsynchronouslyTest.kt index 0562426f..755545fb 100644 --- a/tests/normalized-cache/src/jvmTest/kotlin/WriteToCacheAsynchronouslyTest.kt +++ b/tests/normalized-cache/src/jvmTest/kotlin/WriteToCacheAsynchronouslyTest.kt @@ -4,6 +4,7 @@ import com.apollographql.apollo.ApolloClient import com.apollographql.apollo.testing.internal.runTest import com.apollographql.cache.normalized.ApolloStore import com.apollographql.cache.normalized.api.CacheHeaders +import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.store import com.apollographql.cache.normalized.writeToCacheAsynchronously @@ -81,6 +82,6 @@ class WriteToCacheAsynchronouslyTest { } companion object { - const val QUERY_ROOT_KEY = "QUERY_ROOT" + val QUERY_ROOT_KEY = CacheKey.rootKey() } } diff --git a/tests/pagination/src/commonTest/kotlin/ConnectionPaginationTest.kt b/tests/pagination/src/commonTest/kotlin/ConnectionPaginationTest.kt index 
5bab8abb..c11d70aa 100644 --- a/tests/pagination/src/commonTest/kotlin/ConnectionPaginationTest.kt +++ b/tests/pagination/src/commonTest/kotlin/ConnectionPaginationTest.kt @@ -346,7 +346,6 @@ class ConnectionPaginationTest { assertChainedCachesAreEqual(apolloStore) } - @Test fun errorMemoryCache() { errorTest(MemoryCacheFactory()) diff --git a/tests/pagination/src/commonTest/kotlin/OffsetBasedWithPagePaginationTest.kt b/tests/pagination/src/commonTest/kotlin/OffsetBasedWithPagePaginationTest.kt index e6e4ddec..bcc3724e 100644 --- a/tests/pagination/src/commonTest/kotlin/OffsetBasedWithPagePaginationTest.kt +++ b/tests/pagination/src/commonTest/kotlin/OffsetBasedWithPagePaginationTest.kt @@ -4,6 +4,7 @@ import com.apollographql.apollo.api.Optional import com.apollographql.apollo.api.json.ApolloJsonElement import com.apollographql.apollo.testing.internal.runTest import com.apollographql.cache.normalized.ApolloStore +import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.FieldPolicyCacheResolver import com.apollographql.cache.normalized.api.FieldRecordMerger import com.apollographql.cache.normalized.api.MetadataGenerator @@ -233,12 +234,12 @@ internal fun assertChainedCachesAreEqual(apolloStore: ApolloStore) { } if (dump.size < 2) return val caches = dump.values.toList() - val cache1: Map = caches[0] - val cache2: Map = caches[1] + val cache1: Map = caches[0] + val cache2: Map = caches[1] for (key in cache1.keys) { val record1 = cache1[key]!! val record2 = cache2[key]!! 
- assertEquals(record1.key, record2.key) + assertEquals(record1.key.key, record2.key.key) assertEquals(record1.fields, record2.fields) assertEquals(record1.metadata, record2.metadata) } diff --git a/tests/partial-results/build.gradle.kts b/tests/partial-results/build.gradle.kts index f0e14739..5941d46c 100644 --- a/tests/partial-results/build.gradle.kts +++ b/tests/partial-results/build.gradle.kts @@ -24,6 +24,7 @@ kotlin { implementation(libs.apollo.testing.support) implementation(libs.apollo.mockserver) implementation(libs.kotlin.test) + implementation("com.apollographql.cache:test-utils") } } diff --git a/tests/partial-results/src/commonTest/kotlin/test/CachePartialResultTest.kt b/tests/partial-results/src/commonTest/kotlin/test/CachePartialResultTest.kt index 2717146b..0db6be13 100644 --- a/tests/partial-results/src/commonTest/kotlin/test/CachePartialResultTest.kt +++ b/tests/partial-results/src/commonTest/kotlin/test/CachePartialResultTest.kt @@ -27,6 +27,8 @@ import com.apollographql.cache.normalized.memory.MemoryCacheFactory import com.apollographql.cache.normalized.normalizedCache import com.apollographql.cache.normalized.store import com.apollographql.cache.normalized.storeReceivedDate +import com.apollographql.cache.normalized.testing.append +import com.apollographql.cache.normalized.testing.keyToString import com.apollographql.mockserver.MockServer import com.apollographql.mockserver.enqueueString import kotlinx.coroutines.flow.Flow @@ -124,7 +126,9 @@ class CachePartialResultTest { ) assertErrorsEquals( listOf( - Error.Builder("Object 'User:1' has no field named 'nickName' in the cache").path(listOf("me", "nickName")).build() + Error.Builder("Object '${CacheKey("User:1").keyToString()}' has no field named 'nickName' in the cache") + .path(listOf("me", "nickName")) + .build() ), cacheMissResult.errors ) @@ -306,7 +310,7 @@ class CachePartialResultTest { ) // Remove project lead from the cache - apolloClient.apolloStore.remove(CacheKey("User", "3")) + 
apolloClient.apolloStore.remove(CacheKey("User:3")) val cacheResult = apolloClient.query(MeWithBestFriendQuery()) .fetchPolicyInterceptor(PartialCacheOnlyInterceptor) .execute() @@ -342,13 +346,15 @@ class CachePartialResultTest { ) assertErrorsEquals( listOf( - Error.Builder("Object 'User:3' not found in the cache").path(listOf("me", "projects", 0, "lead")).build() + Error.Builder("Object '${CacheKey("User:3").keyToString()}' not found in the cache") + .path(listOf("me", "projects", 0, "lead")) + .build() ), cacheResult.errors ) // Remove best friend from the cache - apolloClient.apolloStore.remove(CacheKey("User", "2")) + apolloClient.apolloStore.remove(CacheKey("User:2")) val cacheResult2 = apolloClient.query(MeWithBestFriendQuery()) .fetchPolicyInterceptor(PartialCacheOnlyInterceptor) .execute() @@ -379,14 +385,17 @@ class CachePartialResultTest { ) assertErrorsEquals( listOf( - Error.Builder("Object 'User:2' not found in the cache").path(listOf("me", "bestFriend")).build(), - Error.Builder("Object 'User:3' not found in the cache").path(listOf("me", "projects", 0, "lead")).build(), + Error.Builder("Object '${CacheKey("User:2").keyToString()}' not found in the cache").path(listOf("me", "bestFriend")) + .build(), + Error.Builder("Object '${CacheKey("User:3").keyToString()}' not found in the cache") + .path(listOf("me", "projects", 0, "lead")) + .build(), ), cacheResult2.errors ) // Remove project user from the cache - apolloClient.apolloStore.remove(CacheKey("User", "4")) + apolloClient.apolloStore.remove(CacheKey("User:4")) val cacheResult3 = apolloClient.query(MeWithBestFriendQuery()) .fetchPolicyInterceptor(PartialCacheOnlyInterceptor) .execute() @@ -394,9 +403,14 @@ class CachePartialResultTest { assertNull(cacheResult3.data) assertErrorsEquals( listOf( - Error.Builder("Object 'User:2' not found in the cache").path(listOf("me", "bestFriend")).build(), - Error.Builder("Object 'User:3' not found in the cache").path(listOf("me", "projects", 0, "lead")).build(), 
- Error.Builder("Object 'User:4' not found in the cache").path(listOf("me", "projects", 0, "users", 0)).build() + Error.Builder("Object '${CacheKey("User:2").keyToString()}' not found in the cache").path(listOf("me", "bestFriend")) + .build(), + Error.Builder("Object '${CacheKey("User:3").keyToString()}' not found in the cache") + .path(listOf("me", "projects", 0, "lead")) + .build(), + Error.Builder("Object '${CacheKey("User:4").keyToString()}' not found in the cache") + .path(listOf("me", "projects", 0, "users", 0)) + .build() ), cacheResult3.errors ) @@ -529,8 +543,8 @@ class CachePartialResultTest { // Remove the category from the cache apolloClient.apolloStore.accessCache { cache -> - val record = cache.loadRecord("User:1", CacheHeaders.NONE)!! - cache.remove(CacheKey("User", "1"), false) + val record = cache.loadRecord(CacheKey("User:1"), CacheHeaders.NONE)!! + cache.remove(CacheKey("User:1"), false) cache.merge(Record(record.key, record.fields - "category"), CacheHeaders.NONE, DefaultRecordMerger) } val cacheMissResult = apolloClient.query(UserByCategoryQuery(Category(2, "Second"))) @@ -540,7 +554,9 @@ class CachePartialResultTest { assertNull(cacheMissResult.data) assertErrorsEquals( listOf( - Error.Builder("Object 'User:1' has no field named 'category' in the cache").path(listOf("user", "category")).build() + Error.Builder("Object '${CacheKey("User:1").keyToString()}' has no field named 'category' in the cache") + .path(listOf("user", "category")) + .build() ), cacheMissResult.errors ) @@ -630,7 +646,7 @@ class CachePartialResultTest { ) // Remove lead from the cache - apolloClient.apolloStore.remove(CacheKey("User", "2")) + apolloClient.apolloStore.remove(CacheKey("User:2")) val cacheMissResult = apolloClient.query(WithFragmentsQuery()) .fetchPolicyInterceptor(PartialCacheOnlyInterceptor) @@ -667,7 +683,9 @@ class CachePartialResultTest { ) assertErrorsEquals( listOf( - Error.Builder("Object 'User:2' not found in the cache").path(listOf("me", 
"mainProject", "lead0")).build() + Error.Builder("Object '${CacheKey("User:2").keyToString()}' not found in the cache") + .path(listOf("me", "mainProject", "lead0")) + .build() ), cacheMissResult.errors ) @@ -731,7 +749,9 @@ class CachePartialResultTest { ) assertErrorsEquals( listOf( - Error.Builder("Field 'nickName' on object 'User:1' is stale in the cache").path(listOf("me", "nickName")).build() + Error.Builder("Field 'nickName' on object '${CacheKey("User:1").keyToString()}' is stale in the cache") + .path(listOf("me", "nickName")) + .build() ), cacheMissResult.errors ) @@ -795,7 +815,10 @@ class CachePartialResultTest { ) assertErrorsEquals( listOf( - Error.Builder("Field 'salary' on object 'User:1.employeeInfo' is stale in the cache") + Error.Builder("Field 'salary' on object '${ + CacheKey("User:1").append("employeeInfo").keyToString() + }' is stale in the cache" + ) .path(listOf("me", "employeeInfo", "salary")).build() ), cacheMissResult.errors diff --git a/tests/store-errors/src/commonTest/kotlin/test/StoreErrorsTest.kt b/tests/store-errors/src/commonTest/kotlin/test/StoreErrorsTest.kt index 5b1e85fb..ae10fcb8 100644 --- a/tests/store-errors/src/commonTest/kotlin/test/StoreErrorsTest.kt +++ b/tests/store-errors/src/commonTest/kotlin/test/StoreErrorsTest.kt @@ -12,6 +12,7 @@ import com.apollographql.apollo.interceptor.ApolloInterceptorChain import com.apollographql.apollo.testing.internal.runTest import com.apollographql.cache.normalized.ApolloStore import com.apollographql.cache.normalized.FetchPolicy +import com.apollographql.cache.normalized.api.CacheKey import com.apollographql.cache.normalized.api.Record import com.apollographql.cache.normalized.api.withErrors import com.apollographql.cache.normalized.errorsReplaceCachedValues @@ -706,17 +707,17 @@ class StoreErrorsTest { query, listOf(Error.Builder("'nickName' can't be reached").path(listOf("me", "nickName")).build()), ) - val normalized: Map = memoryStore.normalize( + val normalized: Map = 
memoryStore.normalize( executable = query, dataWithErrors = dataWithErrors, customScalarAdapters = CustomScalarAdapters.Empty, ) - assertEquals("User", normalized["User:1"]!!["__typename"]) - assertEquals("1", normalized["User:1"]!!["id"]) - assertEquals("John", normalized["User:1"]!!["firstName"]) - assertEquals("Smith", normalized["User:1"]!!["lastName"]) + assertEquals("User", normalized[CacheKey("User:1")]!!["__typename"]) + assertEquals("1", normalized[CacheKey("User:1")]!!["id"]) + assertEquals("John", normalized[CacheKey("User:1")]!!["firstName"]) + assertEquals("Smith", normalized[CacheKey("User:1")]!!["lastName"]) assertErrorsEquals(Error.Builder("'nickName' can't be reached").path(listOf("me", "nickName")) - .build(), normalized["User:1"]!!["nickName"] as Error + .build(), normalized[CacheKey("User:1")]!!["nickName"] as Error ) }