From b280c0801a358d051fe2d5d2be76b2324db51094 Mon Sep 17 00:00:00 2001 From: Mila <107142260+milaGGL@users.noreply.github.com> Date: Tue, 8 Apr 2025 10:40:54 -0400 Subject: [PATCH 1/8] Add new BSON types to public API (#573) --- firebase-firestore/api.txt | 57 +++ .../firebase/firestore/BsonTypesTest.java | 441 ++++++++++++++++++ .../firebase/firestore/FirestoreTest.java | 324 ++++--------- .../google/firebase/firestore/POJOTest.java | 108 ++++- .../google/firebase/firestore/TypeTest.java | 331 ++++++++++++- .../testutil/IntegrationTestUtil.java | 100 ++++ .../firebase/firestore/BsonBinaryData.java | 118 +++++ .../firebase/firestore/BsonObjectId.java | 59 +++ .../firebase/firestore/BsonTimestamp.java | 72 +++ .../firebase/firestore/DocumentSnapshot.java | 84 ++++ .../google/firebase/firestore/FieldValue.java | 78 ++++ .../google/firebase/firestore/Int32Value.java | 52 +++ .../com/google/firebase/firestore/MaxKey.java | 45 ++ .../com/google/firebase/firestore/MinKey.java | 45 ++ .../google/firebase/firestore/RegexValue.java | 56 +++ .../firebase/firestore/UserDataReader.java | 91 ++++ .../firebase/firestore/UserDataWriter.java | 61 +++ .../firebase/firestore/model/Values.java | 257 ++++++++-- .../firestore/util/CustomClassMapper.java | 101 +++- .../firebase/firestore/BsonTypesTest.java | 151 ++++++ .../firebase/firestore/FieldValueTest.java | 87 ++++ .../firestore/UserDataWriterTest.java | 76 +++ .../bundle/BundleSerializerTest.java | 104 +++++ .../firebase/firestore/core/QueryTest.java | 24 + .../firebase/firestore/model/ValuesTest.java | 123 ++++- .../remote/RemoteSerializerTest.java | 132 ++++++ 26 files changed, 2888 insertions(+), 289 deletions(-) create mode 100644 firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java create mode 100644 firebase-firestore/src/main/java/com/google/firebase/firestore/BsonBinaryData.java create mode 100644 firebase-firestore/src/main/java/com/google/firebase/firestore/BsonObjectId.java create mode 100644 firebase-firestore/src/main/java/com/google/firebase/firestore/BsonTimestamp.java create mode 100644 firebase-firestore/src/main/java/com/google/firebase/firestore/Int32Value.java create mode 100644 firebase-firestore/src/main/java/com/google/firebase/firestore/MaxKey.java create mode 100644 firebase-firestore/src/main/java/com/google/firebase/firestore/MinKey.java create mode 100644 firebase-firestore/src/main/java/com/google/firebase/firestore/RegexValue.java create mode 100644 firebase-firestore/src/test/java/com/google/firebase/firestore/BsonTypesTest.java diff --git a/firebase-firestore/api.txt b/firebase-firestore/api.txt index e3a55cf729c..d461f24b60c 100644 --- a/firebase-firestore/api.txt +++ b/firebase-firestore/api.txt @@ -47,6 +47,28 @@ package com.google.firebase.firestore { method public byte[] toBytes(); } + public final class BsonBinaryData implements java.lang.Comparable { + method public int compareTo(com.google.firebase.firestore.BsonBinaryData); + method public com.google.protobuf.ByteString dataAsByteString(); + method public byte[] dataAsBytes(); + method public static com.google.firebase.firestore.BsonBinaryData fromByteString(int, com.google.protobuf.ByteString); + method public static com.google.firebase.firestore.BsonBinaryData fromBytes(int, byte[]); + method public int subtype(); + } + + public final class BsonObjectId implements java.lang.Comparable { + ctor public BsonObjectId(String); + method public int compareTo(com.google.firebase.firestore.BsonObjectId); + field public final String! 
value; + } + + public final class BsonTimestamp implements java.lang.Comparable { + ctor public BsonTimestamp(long, long); + method public int compareTo(com.google.firebase.firestore.BsonTimestamp); + field public final long increment; + field public final long seconds; + } + public class CollectionReference extends com.google.firebase.firestore.Query { method public com.google.android.gms.tasks.Task add(Object); method public com.google.firebase.firestore.DocumentReference document(); @@ -109,6 +131,9 @@ package com.google.firebase.firestore { method public T? get(String, Class, com.google.firebase.firestore.DocumentSnapshot.ServerTimestampBehavior); method public com.google.firebase.firestore.Blob? getBlob(String); method public Boolean? getBoolean(String); + method public com.google.firebase.firestore.BsonBinaryData? getBsonBinaryData(String); + method public com.google.firebase.firestore.BsonObjectId? getBsonObjectId(String); + method public com.google.firebase.firestore.BsonTimestamp? getBsonTimestamp(String); method public java.util.Map? getData(); method public java.util.Map? getData(com.google.firebase.firestore.DocumentSnapshot.ServerTimestampBehavior); method public java.util.Date? getDate(String); @@ -117,9 +142,13 @@ package com.google.firebase.firestore { method public Double? getDouble(String); method public com.google.firebase.firestore.GeoPoint? getGeoPoint(String); method public String getId(); + method public com.google.firebase.firestore.Int32Value? getInt32Value(String); method public Long? getLong(String); + method public com.google.firebase.firestore.MaxKey? getMaxKey(String); method public com.google.firebase.firestore.SnapshotMetadata getMetadata(); + method public com.google.firebase.firestore.MinKey? getMinKey(String); method public com.google.firebase.firestore.DocumentReference getReference(); + method public com.google.firebase.firestore.RegexValue? getRegexValue(String); method public String? getString(String); method public com.google.firebase.Timestamp? getTimestamp(String); method public com.google.firebase.Timestamp? 
getTimestamp(String, com.google.firebase.firestore.DocumentSnapshot.ServerTimestampBehavior); @@ -149,9 +178,16 @@ package com.google.firebase.firestore { public abstract class FieldValue { method public static com.google.firebase.firestore.FieldValue arrayRemove(java.lang.Object!...!); method public static com.google.firebase.firestore.FieldValue arrayUnion(java.lang.Object!...!); + method public static com.google.firebase.firestore.BsonBinaryData bsonBinaryData(int, byte[]); + method public static com.google.firebase.firestore.BsonObjectId bsonObjectId(String); + method public static com.google.firebase.firestore.BsonTimestamp bsonTimestamp(long, long); method public static com.google.firebase.firestore.FieldValue delete(); method public static com.google.firebase.firestore.FieldValue increment(double); method public static com.google.firebase.firestore.FieldValue increment(long); + method public static com.google.firebase.firestore.Int32Value int32(int); + method public static com.google.firebase.firestore.MaxKey maxKey(); + method public static com.google.firebase.firestore.MinKey minKey(); + method public static com.google.firebase.firestore.RegexValue regex(String, String); method public static com.google.firebase.firestore.FieldValue serverTimestamp(); method public static com.google.firebase.firestore.VectorValue vector(double[]); } @@ -303,6 +339,12 @@ package com.google.firebase.firestore { @java.lang.annotation.Retention(java.lang.annotation.RetentionPolicy.RUNTIME) @java.lang.annotation.Target({java.lang.annotation.ElementType.TYPE}) public @interface IgnoreExtraProperties { } + public final class Int32Value implements java.lang.Comparable { + ctor public Int32Value(int); + method public int compareTo(com.google.firebase.firestore.Int32Value); + field public final int value; + } + public enum ListenSource { enum_constant public static final com.google.firebase.firestore.ListenSource CACHE; enum_constant public static final com.google.firebase.firestore.ListenSource DEFAULT; @@ -360,6 +402,10 @@ package com.google.firebase.firestore { public interface LocalCacheSettings { } + public final class MaxKey { + method public static com.google.firebase.firestore.MaxKey instance(); + } + public final class MemoryCacheSettings implements com.google.firebase.firestore.LocalCacheSettings { method public com.google.firebase.firestore.MemoryGarbageCollectorSettings getGarbageCollectorSettings(); method public static com.google.firebase.firestore.MemoryCacheSettings.Builder newBuilder(); @@ -396,6 +442,10 @@ package com.google.firebase.firestore { enum_constant public static final com.google.firebase.firestore.MetadataChanges INCLUDE; } + public final class MinKey { + method public static com.google.firebase.firestore.MinKey instance(); + } + public interface OnProgressListener { method public void onProgress(ProgressT); } @@ -491,6 +541,13 @@ package com.google.firebase.firestore { method public java.util.List toObjects(Class, com.google.firebase.firestore.DocumentSnapshot.ServerTimestampBehavior); } + public final class RegexValue implements java.lang.Comparable { + ctor public RegexValue(String, String); + method public int compareTo(com.google.firebase.firestore.RegexValue); + field public final String! options; + field public final String! 
pattern; + } + @java.lang.annotation.Retention(java.lang.annotation.RetentionPolicy.RUNTIME) @java.lang.annotation.Target({java.lang.annotation.ElementType.METHOD, java.lang.annotation.ElementType.FIELD}) public @interface ServerTimestamp { } diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java new file mode 100644 index 00000000000..273f8cb35a4 --- /dev/null +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java @@ -0,0 +1,441 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.firebase.firestore; + +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.assertSDKQueryResultsConsistentWithBackend; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testCollectionOnNightly; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testCollectionWithDocsOnNightly; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.waitFor; +import static com.google.firebase.firestore.testutil.TestUtil.map; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import androidx.test.ext.junit.runners.AndroidJUnit4; +import com.google.firebase.firestore.Query.Direction; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Semaphore; +import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; +import org.junit.Test; +import org.junit.runner.RunWith; + +@RunWith(AndroidJUnit4.class) +public class BsonTypesTest { + + @Test + public void writeAndReadBsonTypes() throws ExecutionException, InterruptedException { + Map expected = new HashMap<>(); + + DocumentReference docRef = + waitFor( + testCollectionOnNightly() + .add( + map( + "bsonObjectId", FieldValue.bsonObjectId("507f191e810c19729de860ea"), + "regex", FieldValue.regex("^foo", "i"), + "bsonTimestamp", FieldValue.bsonTimestamp(1, 2), + "bsonBinary", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}), + "int32", FieldValue.int32(1), + "minKey", FieldValue.minKey(), + "maxKey", FieldValue.maxKey()))); + + waitFor( + docRef.set( + map( + "bsonObjectId", + FieldValue.bsonObjectId("507f191e810c19729de860eb"), + "regex", + FieldValue.regex("^foo", "m"), + "bsonTimestamp", + FieldValue.bsonTimestamp(1, 3)), + SetOptions.merge())); + + waitFor(docRef.update(map("int32", FieldValue.int32(2)))); + + expected.put("bsonObjectId", FieldValue.bsonObjectId("507f191e810c19729de860eb")); + expected.put("regex", FieldValue.regex("^foo", "m")); + expected.put("bsonTimestamp", FieldValue.bsonTimestamp(1, 3)); + expected.put("bsonBinary", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3})); 
+ expected.put("int32", FieldValue.int32(2)); + expected.put("minKey", FieldValue.minKey()); + expected.put("maxKey", FieldValue.maxKey()); + + DocumentSnapshot actual = waitFor(docRef.get()); + + assertTrue(actual.get("bsonObjectId") instanceof BsonObjectId); + assertTrue(actual.get("regex") instanceof RegexValue); + assertTrue(actual.get("bsonTimestamp") instanceof BsonTimestamp); + assertTrue(actual.get("bsonBinary") instanceof BsonBinaryData); + assertTrue(actual.get("int32") instanceof Int32Value); + assertTrue(actual.get("minKey") instanceof MinKey); + assertTrue(actual.get("maxKey") instanceof MaxKey); + assertEquals(expected, actual.getData()); + } + + @Test + public void listenToDocumentsWithBsonTypes() throws Throwable { + final Semaphore semaphore = new Semaphore(0); + ListenerRegistration registration = null; + CollectionReference randomColl = testCollectionOnNightly(); + DocumentReference ref = randomColl.document(); + AtomicReference failureMessage = new AtomicReference(null); + int totalPermits = 5; + + try { + registration = + randomColl + .whereEqualTo("purpose", "Bson types tests") + .addSnapshotListener( + (value, error) -> { + try { + DocumentSnapshot docSnap = + value.isEmpty() ? null : value.getDocuments().get(0); + + switch (semaphore.availablePermits()) { + case 0: + assertNull(docSnap); + ref.set( + map( + "purpose", "Bson types tests", + "bsonObjectId", + FieldValue.bsonObjectId("507f191e810c19729de860ea"), + "regex", FieldValue.regex("^foo", "i"), + "bsonTimestamp", FieldValue.bsonTimestamp(1, 2), + "bsonBinary", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}), + "int32", FieldValue.int32(1), + "minKey", FieldValue.minKey(), + "maxKey", FieldValue.maxKey())); + break; + case 1: + assertNotNull(docSnap); + + assertEquals( + docSnap.getBsonBinaryData("bsonBinary"), + FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3})); + assertEquals( + docSnap.getBsonObjectId("bsonObjectId"), + FieldValue.bsonObjectId("507f191e810c19729de860ea")); + assertEquals( + docSnap.getRegexValue("regex"), FieldValue.regex("^foo", "i")); + assertEquals( + docSnap.getBsonTimestamp("bsonTimestamp"), + FieldValue.bsonTimestamp(1, 2)); + assertEquals(docSnap.getInt32Value("int32"), FieldValue.int32(1)); + assertEquals(docSnap.getMinKey("minKey"), FieldValue.minKey()); + assertEquals(docSnap.getMaxKey("maxKey"), FieldValue.maxKey()); + + ref.set( + map( + "purpose", + "Bson types tests", + "bsonObjectId", + FieldValue.bsonObjectId("507f191e810c19729de860eb"), + "regex", + FieldValue.regex("^foo", "m"), + "bsonTimestamp", + FieldValue.bsonTimestamp(1, 3)), + SetOptions.merge()); + break; + case 2: + assertNotNull(docSnap); + + assertEquals( + docSnap.getBsonObjectId("bsonObjectId"), + FieldValue.bsonObjectId("507f191e810c19729de860eb")); + assertEquals( + docSnap.getRegexValue("regex"), FieldValue.regex("^foo", "m")); + assertEquals( + docSnap.getBsonTimestamp("bsonTimestamp"), + FieldValue.bsonTimestamp(1, 3)); + + ref.update(map("int32", FieldValue.int32(2))); + break; + case 3: + assertNotNull(docSnap); + + assertEquals(docSnap.getInt32Value("int32"), FieldValue.int32(2)); + + ref.delete(); + break; + case 4: + assertNull(docSnap); + break; + } + } catch (Throwable t) { + failureMessage.set(t); + semaphore.release(totalPermits); + } + + semaphore.release(); + }); + + semaphore.acquire(totalPermits); + } finally { + if (registration != null) { + registration.remove(); + } + + if (failureMessage.get() != null) { + throw failureMessage.get(); + } + } + } + + /** Verifies that the SDK orders 
Bson type fields the same way as the backend by comparing the result of Query.get() and + * Query.addSnapshotListener(), as well as the online and offline results */ + @Test + public void filterAndOrderBsonObjectIds() throws Exception { + Map> docs = + map( + "a", + map("key", FieldValue.bsonObjectId("507f191e810c19729de860ea")), + "b", + map("key", FieldValue.bsonObjectId("507f191e810c19729de860eb")), + "c", + map("key", FieldValue.bsonObjectId("507f191e810c19729de860ec"))); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereGreaterThan("key", FieldValue.bsonObjectId("507f191e810c19729de860ea")); + + assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "b")); + } + + @Test + public void filterAndOrderBsonTimestamps() throws Exception { + Map> docs = + map( + "a", + map("key", FieldValue.bsonTimestamp(1, 1)), + "b", + map("key", FieldValue.bsonTimestamp(1, 2)), + "c", + map("key", FieldValue.bsonTimestamp(2, 1))); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereGreaterThan("key", FieldValue.bsonTimestamp(1, 1)); + + assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "b")); + } + + @Test + public void filterAndOrderBsonBinaryData() throws Exception { + Map> docs = + map( + "a", + map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3})), + "b", + map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 4})), + "c", + map("key", FieldValue.bsonBinaryData(2, new byte[] {1, 2, 2}))); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereGreaterThan("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3})); + + assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "b")); + } + + @Test + public void filterAndOrderRegex() throws Exception { + Map> docs = + map( + "a", map("key", FieldValue.regex("^bar", "i")), + "b", map("key", FieldValue.regex("^bar", "m")), + "c", map("key", FieldValue.regex("^baz", "i"))); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereGreaterThan("key", FieldValue.regex("^bar", "i")); + + assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "b")); + } + + @Test + public void filterAndOrderInt32() throws Exception { + Map> docs = + map( + "a", map("key", FieldValue.int32(-1)), + "b", map("key", FieldValue.int32(1)), + "c", map("key", FieldValue.int32(2))); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereGreaterThanOrEqualTo("key", FieldValue.int32(1)); + + assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "b")); + } + + @Test + public void filterAndOrderMinKey() throws Exception { + Map> docs = + map( + "a", map("key", FieldValue.minKey()), + "b", map("key", FieldValue.minKey()), + "c", map("key", FieldValue.maxKey())); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query orderedQuery = + randomColl + .orderBy( + "key", + Direction + .DESCENDING) // minKeys are equal, would sort by documentId as secondary order + .whereEqualTo("key", 
FieldValue.minKey()); + + assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("b", "a")); + } + + @Test + public void filterAndOrderMaxKey() throws Exception { + Map> docs = + map( + "a", map("key", FieldValue.minKey()), + "b", map("key", FieldValue.maxKey()), + "c", map("key", FieldValue.maxKey())); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query orderedQuery = + randomColl + .orderBy( + "key", + Direction + .DESCENDING) // maxKeys are equal, would sort by documentId as secondary order + .whereEqualTo("key", FieldValue.maxKey()); + + assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "b")); + } + + @Test + public void orderBsonTypesTogether() throws Exception { + Map> docs = + map( + "bsonObjectId1", + map("key", FieldValue.bsonObjectId("507f191e810c19729de860ea")), + "bsonObjectId2", + map("key", FieldValue.bsonObjectId("507f191e810c19729de860eb")), + "bsonObjectId3", + map("key", FieldValue.bsonObjectId("407f191e810c19729de860ea")), + "regex1", + map("key", FieldValue.regex("^bar", "m")), + "regex2", + map("key", FieldValue.regex("^bar", "i")), + "regex3", + map("key", FieldValue.regex("^baz", "i")), + "bsonTimestamp1", + map("key", FieldValue.bsonTimestamp(2, 0)), + "bsonTimestamp2", + map("key", FieldValue.bsonTimestamp(1, 2)), + "bsonTimestamp3", + map("key", FieldValue.bsonTimestamp(1, 1)), + "bsonBinary1", + map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3})), + "bsonBinary2", + map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 4})), + "bsonBinary3", + map("key", FieldValue.bsonBinaryData(2, new byte[] {1, 2, 2})), + "int32Value1", + map("key", FieldValue.int32(-1)), + "int32Value2", + map("key", FieldValue.int32(1)), + "int32Value3", + map("key", FieldValue.int32(0)), + "minKey1", + map("key", FieldValue.minKey()), + "minKey2", + map("key", FieldValue.minKey()), + "maxKey1", + map("key", FieldValue.maxKey()), + "maxKey2", + map("key", FieldValue.maxKey())); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query orderedQuery = randomColl.orderBy("key", Direction.DESCENDING); + List expectedDocs = + Arrays.asList( + "maxKey2", + "maxKey1", + "regex3", + "regex1", + "regex2", + "bsonObjectId2", + "bsonObjectId1", + "bsonObjectId3", + "bsonBinary3", + "bsonBinary2", + "bsonBinary1", + "bsonTimestamp1", + "bsonTimestamp2", + "bsonTimestamp3", + "int32Value2", + "int32Value3", + "int32Value1", + "minKey2", + "minKey1"); + + assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, expectedDocs); + } + + @Test + public void canRunTransactionsOnDocumentsWithBsonTypes() throws Exception { + Map> docs = + map( + "a", map("key", FieldValue.bsonObjectId("507f191e810c19729de860ea")), + "b", map("key", FieldValue.regex("^foo", "i")), + "c", map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}))); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + waitFor( + randomColl.firestore.runTransaction( + transaction -> { + DocumentSnapshot docSnap = transaction.get(randomColl.document("a")); + assertEquals( + docSnap.getBsonObjectId("key"), + FieldValue.bsonObjectId("507f191e810c19729de860ea")); + transaction.update(randomColl.document("b"), "key", FieldValue.regex("^bar", "i")); + transaction.delete(randomColl.document("c")); + return null; + })); + + QuerySnapshot getSnapshot = waitFor(randomColl.get()); + + List getSnapshotDocIds = + getSnapshot.getDocuments().stream().map(ds -> ds.getId()).collect(Collectors.toList()); + + 
assertTrue(getSnapshotDocIds.equals(Arrays.asList("a", "b"))); + assertEquals( + getSnapshot.getDocuments().get(0).getBsonObjectId("key"), + FieldValue.bsonObjectId("507f191e810c19729de860ea")); + assertEquals( + getSnapshot.getDocuments().get(1).getRegexValue("key"), FieldValue.regex("^bar", "i")); + } +} diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/FirestoreTest.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/FirestoreTest.java index 95dcd2863fe..66c7dadfac0 100644 --- a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/FirestoreTest.java +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/FirestoreTest.java @@ -15,6 +15,7 @@ package com.google.firebase.firestore; import static com.google.firebase.firestore.AccessHelper.getAsyncQueue; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.assertSDKQueryResultsConsistentWithBackend; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.checkOnlineAndOfflineResultsMatch; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.isRunningAgainstEmulator; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.newTestSettings; @@ -64,7 +65,6 @@ import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Semaphore; -import java.util.stream.Collectors; import org.junit.After; import org.junit.Test; import org.junit.runner.RunWith; @@ -1497,28 +1497,27 @@ public void testCanGetSameOrDifferentPersistentCacheIndexManager() { } @Test - public void snapshotListenerSortsQueryByDocumentIdsSameAsGetQuery() { + public void snapshotListenerSortsQueryByDocumentIdsSameAsGetQuery() throws Exception { Map> testDocs = map( - "A", map("a", 1), - "a", map("a", 1), - "Aa", map("a", 1), - "7", map("a", 1), - "12", map("a", 1), - "__id7__", map("a", 1), - "__id12__", map("a", 1), - "__id-2__", map("a", 1), - "__id1_", map("a", 1), - "_id1__", map("a", 1), - "__id", map("a", 1), - "__id9223372036854775807__", map("a", 1), - "__id-9223372036854775808__", map("a", 1)); + "A", map("a", 1L), + "a", map("a", 1L), + "Aa", map("a", 1L), + "7", map("a", 1L), + "12", map("a", 1L), + "__id7__", map("a", 1L), + "__id12__", map("a", 1L), + "__id-2__", map("a", 1L), + "__id1_", map("a", 1L), + "_id1__", map("a", 1L), + "__id", map("a", 1L), + "__id9223372036854775807__", map("a", 1L), + "__id-9223372036854775808__", map("a", 1L)); CollectionReference colRef = testCollectionWithDocs(testDocs); - - // Run get query Query orderedQuery = colRef.orderBy(FieldPath.documentId()); - List expectedDocIds = + + List expectedDocs = Arrays.asList( "__id-9223372036854775808__", "__id-2__", @@ -1534,106 +1533,61 @@ public void snapshotListenerSortsQueryByDocumentIdsSameAsGetQuery() { "_id1__", "a"); - QuerySnapshot getSnapshot = waitFor(orderedQuery.get()); - List getSnapshotDocIds = - getSnapshot.getDocuments().stream().map(ds -> ds.getId()).collect(Collectors.toList()); - - // Run query with snapshot listener - EventAccumulator eventAccumulator = new EventAccumulator(); - ListenerRegistration registration = - orderedQuery.addSnapshotListener(eventAccumulator.listener()); - - List watchSnapshotDocIds = new ArrayList<>(); - try { - QuerySnapshot watchSnapshot = eventAccumulator.await(); - watchSnapshotDocIds = - watchSnapshot.getDocuments().stream() - .map(documentSnapshot -> documentSnapshot.getId()) - .collect(Collectors.toList()); - } finally { - registration.remove(); - } - - 
// Assert that get and snapshot listener requests sort docs in the same, expected order - assertTrue(getSnapshotDocIds.equals(expectedDocIds)); - assertTrue(watchSnapshotDocIds.equals(expectedDocIds)); + assertSDKQueryResultsConsistentWithBackend(orderedQuery, testDocs, expectedDocs); } @Test - public void snapshotListenerSortsFilteredQueryByDocumentIdsSameAsGetQuery() { + public void snapshotListenerSortsFilteredQueryByDocumentIdsSameAsGetQuery() throws Exception { Map> testDocs = map( - "A", map("a", 1), - "a", map("a", 1), - "Aa", map("a", 1), - "7", map("a", 1), - "12", map("a", 1), - "__id7__", map("a", 1), - "__id12__", map("a", 1), - "__id-2__", map("a", 1), - "__id1_", map("a", 1), - "_id1__", map("a", 1), - "__id", map("a", 1), - "__id9223372036854775807__", map("a", 1), - "__id-9223372036854775808__", map("a", 1)); + "A", map("a", 1L), + "a", map("a", 1L), + "Aa", map("a", 1L), + "7", map("a", 1L), + "12", map("a", 1L), + "__id7__", map("a", 1L), + "__id12__", map("a", 1L), + "__id-2__", map("a", 1L), + "__id1_", map("a", 1L), + "_id1__", map("a", 1L), + "__id", map("a", 1L), + "__id9223372036854775807__", map("a", 1L), + "__id-9223372036854775808__", map("a", 1L)); CollectionReference colRef = testCollectionWithDocs(testDocs); - - // Run get query Query filteredQuery = colRef .whereGreaterThan(FieldPath.documentId(), "__id7__") .whereLessThanOrEqualTo(FieldPath.documentId(), "A") .orderBy(FieldPath.documentId()); - List expectedDocIds = - Arrays.asList("__id12__", "__id9223372036854775807__", "12", "7", "A"); - - QuerySnapshot getSnapshot = waitFor(filteredQuery.get()); - List getSnapshotDocIds = - getSnapshot.getDocuments().stream().map(ds -> ds.getId()).collect(Collectors.toList()); - - // Run query with snapshot listener - EventAccumulator eventAccumulator = new EventAccumulator(); - ListenerRegistration registration = - filteredQuery.addSnapshotListener(eventAccumulator.listener()); - List watchSnapshotDocIds = new ArrayList<>(); - try { - QuerySnapshot watchSnapshot = eventAccumulator.await(); - watchSnapshotDocIds = - watchSnapshot.getDocuments().stream() - .map(documentSnapshot -> documentSnapshot.getId()) - .collect(Collectors.toList()); - } finally { - registration.remove(); - } + List expectedDocs = + Arrays.asList("__id12__", "__id9223372036854775807__", "12", "7", "A"); - // Assert that get and snapshot listener requests sort docs in the same, expected order - assertTrue(getSnapshotDocIds.equals(expectedDocIds)); - assertTrue(watchSnapshotDocIds.equals(expectedDocIds)); + assertSDKQueryResultsConsistentWithBackend(filteredQuery, testDocs, expectedDocs); } @Test public void sdkOrdersQueryByDocumentIdTheSameWayOnlineAndOffline() { Map> testDocs = map( - "A", map("a", 1), - "a", map("a", 1), - "Aa", map("a", 1), - "7", map("a", 1), - "12", map("a", 1), - "__id7__", map("a", 1), - "__id12__", map("a", 1), - "__id-2__", map("a", 1), - "__id1_", map("a", 1), - "_id1__", map("a", 1), - "__id", map("a", 1), - "__id9223372036854775807__", map("a", 1), - "__id-9223372036854775808__", map("a", 1)); + "A", map("a", 1L), + "a", map("a", 1L), + "Aa", map("a", 1L), + "7", map("a", 1L), + "12", map("a", 1L), + "__id7__", map("a", 1L), + "__id12__", map("a", 1L), + "__id-2__", map("a", 1L), + "__id1_", map("a", 1L), + "_id1__", map("a", 1L), + "__id", map("a", 1L), + "__id9223372036854775807__", map("a", 1L), + "__id-9223372036854775808__", map("a", 1L)); CollectionReference colRef = testCollectionWithDocs(testDocs); - // Test query Query orderedQuery = 
colRef.orderBy(FieldPath.documentId()); + List expectedDocIds = Arrays.asList( "__id-9223372036854775808__", @@ -1650,12 +1604,11 @@ public void sdkOrdersQueryByDocumentIdTheSameWayOnlineAndOffline() { "_id1__", "a"); - // Run query with snapshot listener checkOnlineAndOfflineResultsMatch(orderedQuery, expectedDocIds.toArray(new String[0])); } @Test - public void snapshotListenerSortsUnicodeStringsAsServer() { + public void snapshotListenerSortsUnicodeStringsAsServer() throws Exception { Map> testDocs = map( "a", @@ -1683,36 +1636,15 @@ public void snapshotListenerSortsUnicodeStringsAsServer() { CollectionReference colRef = testCollectionWithDocs(testDocs); Query orderedQuery = colRef.orderBy("value"); + List expectedDocIds = Arrays.asList("b", "a", "h", "i", "c", "f", "e", "d", "g", "k", "j"); - QuerySnapshot getSnapshot = waitFor(orderedQuery.get()); - List getSnapshotDocIds = - getSnapshot.getDocuments().stream().map(ds -> ds.getId()).collect(Collectors.toList()); - - EventAccumulator eventAccumulator = new EventAccumulator(); - ListenerRegistration registration = - orderedQuery.addSnapshotListener(eventAccumulator.listener()); - - List watchSnapshotDocIds = new ArrayList<>(); - try { - QuerySnapshot watchSnapshot = eventAccumulator.await(); - watchSnapshotDocIds = - watchSnapshot.getDocuments().stream() - .map(documentSnapshot -> documentSnapshot.getId()) - .collect(Collectors.toList()); - } finally { - registration.remove(); - } - - assertTrue(getSnapshotDocIds.equals(expectedDocIds)); - assertTrue(watchSnapshotDocIds.equals(expectedDocIds)); - - checkOnlineAndOfflineResultsMatch(orderedQuery, expectedDocIds.toArray(new String[0])); + assertSDKQueryResultsConsistentWithBackend(orderedQuery, testDocs, expectedDocIds); } @Test - public void snapshotListenerSortsUnicodeStringsInArrayAsServer() { + public void snapshotListenerSortsUnicodeStringsInArrayAsServer() throws Exception { Map> testDocs = map( "a", @@ -1740,36 +1672,15 @@ public void snapshotListenerSortsUnicodeStringsInArrayAsServer() { CollectionReference colRef = testCollectionWithDocs(testDocs); Query orderedQuery = colRef.orderBy("value"); + List expectedDocIds = Arrays.asList("b", "a", "h", "i", "c", "f", "e", "d", "g", "k", "j"); - QuerySnapshot getSnapshot = waitFor(orderedQuery.get()); - List getSnapshotDocIds = - getSnapshot.getDocuments().stream().map(ds -> ds.getId()).collect(Collectors.toList()); - - EventAccumulator eventAccumulator = new EventAccumulator(); - ListenerRegistration registration = - orderedQuery.addSnapshotListener(eventAccumulator.listener()); - - List watchSnapshotDocIds = new ArrayList<>(); - try { - QuerySnapshot watchSnapshot = eventAccumulator.await(); - watchSnapshotDocIds = - watchSnapshot.getDocuments().stream() - .map(documentSnapshot -> documentSnapshot.getId()) - .collect(Collectors.toList()); - } finally { - registration.remove(); - } - - assertTrue(getSnapshotDocIds.equals(expectedDocIds)); - assertTrue(watchSnapshotDocIds.equals(expectedDocIds)); - - checkOnlineAndOfflineResultsMatch(orderedQuery, expectedDocIds.toArray(new String[0])); + assertSDKQueryResultsConsistentWithBackend(orderedQuery, testDocs, expectedDocIds); } @Test - public void snapshotListenerSortsUnicodeStringsInMapAsServer() { + public void snapshotListenerSortsUnicodeStringsInMapAsServer() throws Exception { Map> testDocs = map( "a", @@ -1797,36 +1708,15 @@ public void snapshotListenerSortsUnicodeStringsInMapAsServer() { CollectionReference colRef = testCollectionWithDocs(testDocs); Query orderedQuery = 
colRef.orderBy("value"); + List expectedDocIds = Arrays.asList("b", "a", "h", "i", "c", "f", "e", "d", "g", "k", "j"); - QuerySnapshot getSnapshot = waitFor(orderedQuery.get()); - List getSnapshotDocIds = - getSnapshot.getDocuments().stream().map(ds -> ds.getId()).collect(Collectors.toList()); - - EventAccumulator eventAccumulator = new EventAccumulator(); - ListenerRegistration registration = - orderedQuery.addSnapshotListener(eventAccumulator.listener()); - - List watchSnapshotDocIds = new ArrayList<>(); - try { - QuerySnapshot watchSnapshot = eventAccumulator.await(); - watchSnapshotDocIds = - watchSnapshot.getDocuments().stream() - .map(documentSnapshot -> documentSnapshot.getId()) - .collect(Collectors.toList()); - } finally { - registration.remove(); - } - - assertTrue(getSnapshotDocIds.equals(expectedDocIds)); - assertTrue(watchSnapshotDocIds.equals(expectedDocIds)); - - checkOnlineAndOfflineResultsMatch(orderedQuery, expectedDocIds.toArray(new String[0])); + assertSDKQueryResultsConsistentWithBackend(orderedQuery, testDocs, expectedDocIds); } @Test - public void snapshotListenerSortsUnicodeStringsInMapKeyAsServer() { + public void snapshotListenerSortsUnicodeStringsInMapKeyAsServer() throws Exception { Map> testDocs = map( "a", @@ -1854,36 +1744,15 @@ public void snapshotListenerSortsUnicodeStringsInMapKeyAsServer() { CollectionReference colRef = testCollectionWithDocs(testDocs); Query orderedQuery = colRef.orderBy("value"); + List expectedDocIds = Arrays.asList("b", "a", "h", "i", "c", "f", "e", "d", "g", "k", "j"); - QuerySnapshot getSnapshot = waitFor(orderedQuery.get()); - List getSnapshotDocIds = - getSnapshot.getDocuments().stream().map(ds -> ds.getId()).collect(Collectors.toList()); - - EventAccumulator eventAccumulator = new EventAccumulator(); - ListenerRegistration registration = - orderedQuery.addSnapshotListener(eventAccumulator.listener()); - - List watchSnapshotDocIds = new ArrayList<>(); - try { - QuerySnapshot watchSnapshot = eventAccumulator.await(); - watchSnapshotDocIds = - watchSnapshot.getDocuments().stream() - .map(documentSnapshot -> documentSnapshot.getId()) - .collect(Collectors.toList()); - } finally { - registration.remove(); - } - - assertTrue(getSnapshotDocIds.equals(expectedDocIds)); - assertTrue(watchSnapshotDocIds.equals(expectedDocIds)); - - checkOnlineAndOfflineResultsMatch(orderedQuery, expectedDocIds.toArray(new String[0])); + assertSDKQueryResultsConsistentWithBackend(orderedQuery, testDocs, expectedDocIds); } @Test - public void snapshotListenerSortsUnicodeStringsInDocumentKeyAsServer() { + public void snapshotListenerSortsUnicodeStringsInDocumentKeyAsServer() throws Exception { Map> testDocs = map( "Łukasiewicz", @@ -1911,38 +1780,16 @@ public void snapshotListenerSortsUnicodeStringsInDocumentKeyAsServer() { CollectionReference colRef = testCollectionWithDocs(testDocs); Query orderedQuery = colRef.orderBy(FieldPath.documentId()); + List expectedDocIds = Arrays.asList( "Sierpiński", "Łukasiewicz", "你好", "你顥", "岩澤", "︒", "P", "🄟", "🐵", "😀", "😁"); - QuerySnapshot getSnapshot = waitFor(orderedQuery.get()); - List getSnapshotDocIds = - getSnapshot.getDocuments().stream().map(ds -> ds.getId()).collect(Collectors.toList()); - - EventAccumulator eventAccumulator = new EventAccumulator(); - ListenerRegistration registration = - orderedQuery.addSnapshotListener(eventAccumulator.listener()); - - List watchSnapshotDocIds = new ArrayList<>(); - try { - QuerySnapshot watchSnapshot = eventAccumulator.await(); - watchSnapshotDocIds = - 
watchSnapshot.getDocuments().stream() - .map(documentSnapshot -> documentSnapshot.getId()) - .collect(Collectors.toList()); - } finally { - registration.remove(); - } - - assertTrue(getSnapshotDocIds.equals(expectedDocIds)); - assertTrue(watchSnapshotDocIds.equals(expectedDocIds)); - - checkOnlineAndOfflineResultsMatch(orderedQuery, expectedDocIds.toArray(new String[0])); + assertSDKQueryResultsConsistentWithBackend(orderedQuery, testDocs, expectedDocIds); } @Test - public void snapshotListenerSortsInvalidUnicodeStringsAsServer() { - // Note: Protocol Buffer converts any invalid surrogates to "?". + public void snapshotListenerSortsInvalidUnicodeStringsAsServer() throws Exception { Map> testDocs = map( "a", @@ -1962,30 +1809,27 @@ public void snapshotListenerSortsInvalidUnicodeStringsAsServer() { CollectionReference colRef = testCollectionWithDocs(testDocs); Query orderedQuery = colRef.orderBy("value"); - List expectedDocIds = Arrays.asList("a", "d", "e", "f", "g", "b", "c"); - - QuerySnapshot getSnapshot = waitFor(orderedQuery.get()); - List getSnapshotDocIds = - getSnapshot.getDocuments().stream().map(ds -> ds.getId()).collect(Collectors.toList()); - EventAccumulator eventAccumulator = new EventAccumulator(); - ListenerRegistration registration = - orderedQuery.addSnapshotListener(eventAccumulator.listener()); - - List watchSnapshotDocIds = new ArrayList<>(); - try { - QuerySnapshot watchSnapshot = eventAccumulator.await(); - watchSnapshotDocIds = - watchSnapshot.getDocuments().stream() - .map(documentSnapshot -> documentSnapshot.getId()) - .collect(Collectors.toList()); - } finally { - registration.remove(); - } + List expectedDocIds = Arrays.asList("a", "d", "e", "f", "g", "b", "c"); - assertTrue(getSnapshotDocIds.equals(expectedDocIds)); - assertTrue(watchSnapshotDocIds.equals(expectedDocIds)); + // Note: Protocol Buffer converts any invalid surrogates to "?". 
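+    // The expected stored values below therefore use "?" where the invalid surrogate pairs were written.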
+ Map> actualDocs = + map( + "a", + map("value", "Z"), + "b", + map("value", "你好"), + "c", + map("value", "😀"), + "d", + map("value", "ab?"), + "e", + map("value", "ab?"), + "f", + map("value", "ab??"), + "g", + map("value", "ab??")); - checkOnlineAndOfflineResultsMatch(orderedQuery, expectedDocIds.toArray(new String[0])); + assertSDKQueryResultsConsistentWithBackend(orderedQuery, actualDocs, expectedDocIds); } } diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/POJOTest.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/POJOTest.java index 8cceddb7188..8b3f48c06a1 100644 --- a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/POJOTest.java +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/POJOTest.java @@ -15,7 +15,9 @@ package com.google.firebase.firestore; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testCollection; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testCollectionOnNightly; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testDocument; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testDocumentOnNightly; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.waitFor; import static com.google.firebase.firestore.testutil.TestUtil.expectError; import static com.google.firebase.firestore.testutil.TestUtil.map; @@ -47,6 +49,13 @@ public static final class POJO { Blob blob; GeoPoint geoPoint; DocumentReference documentReference; + BsonObjectId bsonObjectId; + BsonBinaryData bsonBinaryData; + BsonTimestamp bsonTimestamp; + RegexValue regexValue; + Int32Value int32Value; + MinKey minKey; + MaxKey maxKey; public POJO() {} @@ -60,6 +69,13 @@ public POJO(double number, String str, DocumentReference documentReference) { this.timestamp = new Timestamp(123, 123456000); this.blob = Blob.fromBytes(new byte[] {3, 1, 4, 1, 5}); this.geoPoint = new GeoPoint(3.1415, 9.2653); + this.bsonObjectId = FieldValue.bsonObjectId("507f191e810c19729de860ea"); + this.bsonBinaryData = FieldValue.bsonBinaryData(1, new byte[] {3, 1, 4, 1, 5}); + this.bsonTimestamp = FieldValue.bsonTimestamp(1, 2); + this.regexValue = FieldValue.regex("^foo", "i"); + this.int32Value = FieldValue.int32(1); + this.minKey = FieldValue.minKey(); + this.maxKey = FieldValue.maxKey(); } public double getNumber() { @@ -118,6 +134,62 @@ public void setDocumentReference(DocumentReference documentReference) { this.documentReference = documentReference; } + public BsonObjectId getBsonObjectId() { + return bsonObjectId; + } + + public void setBsonObjectId(BsonObjectId bsonObjectId) { + this.bsonObjectId = bsonObjectId; + } + + public BsonBinaryData getBsonBinaryData() { + return bsonBinaryData; + } + + public void setBsonBinaryData(BsonBinaryData bsonBinaryData) { + this.bsonBinaryData = bsonBinaryData; + } + + public BsonTimestamp getBsonTimestamp() { + return bsonTimestamp; + } + + public void setBsonTimestamp(BsonTimestamp bsonTimestamp) { + this.bsonTimestamp = bsonTimestamp; + } + + public RegexValue getRegexValue() { + return regexValue; + } + + public void setRegexValue(RegexValue regexValue) { + this.regexValue = regexValue; + } + + public Int32Value getInt32Value() { + return int32Value; + } + + public void setInt32Value(Int32Value int32Value) { + this.int32Value = int32Value; + } + + public MinKey getMinKey() { + return minKey; + } + + public void setMinKey(MinKey minKey) { + this.minKey = minKey; + } + 
+ public MaxKey getMaxKey() { + return maxKey; + } + + public void setMaxKey(MaxKey maxKey) { + this.maxKey = maxKey; + } + @Override public boolean equals(Object o) { if (this == o) { @@ -147,6 +219,27 @@ public boolean equals(Object o) { if (!geoPoint.equals(pojo.geoPoint)) { return false; } + if (!bsonBinaryData.equals(pojo.bsonBinaryData)) { + return false; + } + if (!bsonTimestamp.equals(pojo.bsonTimestamp)) { + return false; + } + if (!bsonObjectId.equals(pojo.bsonObjectId)) { + return false; + } + if (!regexValue.equals(pojo.regexValue)) { + return false; + } + if (!int32Value.equals(pojo.int32Value)) { + return false; + } + if (!minKey.equals(pojo.minKey)) { + return false; + } + if (!maxKey.equals(pojo.maxKey)) { + return false; + } // TODO: Implement proper equality on DocumentReference. return documentReference.getPath().equals(pojo.documentReference.getPath()); @@ -164,6 +257,13 @@ public int hashCode() { result = 31 * result + blob.hashCode(); result = 31 * result + geoPoint.hashCode(); result = 31 * result + documentReference.getPath().hashCode(); + result = 31 * result + bsonObjectId.hashCode(); + result = 31 * result + bsonBinaryData.hashCode(); + result = 31 * result + bsonTimestamp.hashCode(); + result = 31 * result + regexValue.hashCode(); + result = 31 * result + int32Value.hashCode(); + result = 31 * result + minKey.hashCode(); + result = 31 * result + maxKey.hashCode(); return result; } } @@ -236,7 +336,7 @@ public void tearDown() { @Test public void testWriteAndRead() { - CollectionReference collection = testCollection(); + CollectionReference collection = testCollectionOnNightly(); POJO data = new POJO(1.0, "a", collection.document()); DocumentReference reference = waitFor(collection.add(data)); DocumentSnapshot doc = waitFor(reference.get()); @@ -260,7 +360,7 @@ public void testDocumentIdAnnotation() { @Test public void testSetMerge() { - CollectionReference collection = testCollection(); + CollectionReference collection = testCollectionOnNightly(); POJO data = new POJO(1.0, "a", collection.document()); DocumentReference reference = waitFor(collection.add(data)); DocumentSnapshot doc = waitFor(reference.get()); @@ -277,7 +377,7 @@ public void testSetMerge() { // General smoke test that makes sure APIs accept POJOs. @Test public void testAPIsAcceptPOJOsForFields() { - DocumentReference ref = testDocument(); + DocumentReference ref = testDocumentOnNightly(); ArrayList> tasks = new ArrayList<>(); // as Map<> entries in a set() call. @@ -296,7 +396,7 @@ public void testAPIsAcceptPOJOsForFields() { // as Query parameters. 
data.setBlob(null); // blobs are broken, see b/117680212 - tasks.add(testCollection().whereEqualTo("field", data).get()); + tasks.add(testCollectionOnNightly().whereEqualTo("field", data).get()); waitFor(Tasks.whenAll(tasks)); } diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/TypeTest.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/TypeTest.java index 664c2207843..f3602c59fdc 100644 --- a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/TypeTest.java +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/TypeTest.java @@ -14,17 +14,25 @@ package com.google.firebase.firestore; +import static com.google.firebase.firestore.FieldValue.maxKey; +import static com.google.firebase.firestore.FieldValue.minKey; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.assertSDKQueryResultsConsistentWithBackend; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testCollection; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testCollectionOnNightly; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testDocumentOnNightly; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.waitFor; +import static com.google.firebase.firestore.testutil.IntegrationTestUtil.writeTestDocsOnCollection; import static com.google.firebase.firestore.testutil.TestUtil.blob; import static com.google.firebase.firestore.testutil.TestUtil.map; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import androidx.test.ext.junit.runners.AndroidJUnit4; import com.google.firebase.Timestamp; import com.google.firebase.firestore.testutil.IntegrationTestUtil; +import com.google.protobuf.ByteString; import java.util.Arrays; import java.util.Collections; import java.util.Date; @@ -87,9 +95,201 @@ public void testCanReadAndWriteDates() { verifySuccessfulWriteReadCycle(map("date", new Timestamp(date)), testDoc()); } + @Test + public void testCanReadAndWriteDocumentReferences() { + DocumentReference docRef = testDoc(); + Map data = map("a", 42L, "ref", docRef); + verifySuccessfulWriteReadCycle(data, docRef); + } + + @Test + public void testCanReadAndWriteDocumentReferencesInLists() { + DocumentReference docRef = testDoc(); + List refs = Collections.singletonList(docRef); + Map data = map("a", 42L, "refs", refs); + verifySuccessfulWriteReadCycle(data, docRef); + } + + @Test + public void testCanReadAndWriteMinKey() { + verifySuccessfulWriteReadCycle(map("minKey", minKey()), testDocumentOnNightly()); + } + + @Test + public void testCanReadAndWriteMaxKey() { + verifySuccessfulWriteReadCycle(map("maxKey", maxKey()), testDocumentOnNightly()); + } + + @Test + public void testCanReadAndWriteRegexValue() { + verifySuccessfulWriteReadCycle( + map("regex", new RegexValue("^foo", "i")), testDocumentOnNightly()); + } + + @Test + public void testCanReadAndWriteInt32Value() { + verifySuccessfulWriteReadCycle(map("int32", new Int32Value(1)), testDocumentOnNightly()); + } + + @Test + public void testCanReadAndWriteBsonTimestampValue() { + verifySuccessfulWriteReadCycle( + map("bsonTimestamp", new BsonTimestamp(1, 2)), testDocumentOnNightly()); + } + + @Test + public void testCanReadAndWriteBsonObjectIdValue() { + verifySuccessfulWriteReadCycle( + map("bsonObjectId", new BsonObjectId("507f191e810c19729de860ea")), 
testDocumentOnNightly()); + } + + @Test + public void testCanReadAndWriteBsonBinaryValue() { + verifySuccessfulWriteReadCycle( + map("bsonBinary", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})), + testDocumentOnNightly()); + + verifySuccessfulWriteReadCycle( + map("bsonBinary", BsonBinaryData.fromBytes(128, new byte[] {1, 2, 3})), + testDocumentOnNightly()); + + verifySuccessfulWriteReadCycle( + map("bsonBinary", BsonBinaryData.fromByteString(255, ByteString.EMPTY)), + testDocumentOnNightly()); + } + + @Test + public void testCanReadAndWriteBsonTypesInLists() { + List data = + Arrays.asList( + FieldValue.bsonObjectId("507f191e810c19729de860ea"), + FieldValue.regex("^foo", "i"), + FieldValue.bsonTimestamp(1, 2), + FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}), + FieldValue.int32(1), + FieldValue.minKey(), + FieldValue.maxKey()); + + verifySuccessfulWriteReadCycle(map("BsonTypes", data), testDocumentOnNightly()); + } + + @Test + public void testCanReadAndWriteBsonTypesInMaps() { + Map data = + map( + "bsonObjectId", FieldValue.bsonObjectId("507f191e810c19729de860ea"), + "regex", FieldValue.regex("^foo", "i"), + "bsonTimestamp", FieldValue.bsonTimestamp(1, 2), + "bsonBinary", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}), + "int32", FieldValue.int32(1), + "minKey", FieldValue.minKey(), + "maxKey", FieldValue.maxKey()); + + verifySuccessfulWriteReadCycle(map("BsonTypes", data), testDocumentOnNightly()); + } + + @Test + public void invalidRegexGetsRejected() throws Exception { + Exception error = null; + try { + waitFor(testDocumentOnNightly().set(map("key", FieldValue.regex("foo", "a")))); + } catch (Exception e) { + error = e; + } + assertNotNull(error); + assertTrue( + error + .getMessage() + .contains( + "Invalid regex option 'a'. Supported options are 'i', 'm', 's', 'u', and 'x'")); + } + + @Test + public void invalidBsonObjectIdGetsRejected() throws Exception { + Exception error = null; + try { + // bsonObjectId with length not equal to 24 gets rejected + waitFor(testDocumentOnNightly().set(map("key", FieldValue.bsonObjectId("foobar")))); + } catch (Exception e) { + error = e; + } + assertNotNull(error); + assertTrue(error.getMessage().contains("Object ID hex string has incorrect length.")); + } + + @Test + public void invalidBsonBinaryDataGetsRejected() throws Exception { + Exception error = null; + try { + waitFor( + testDocumentOnNightly() + .set(map("key", FieldValue.bsonBinaryData(1234, new byte[] {1, 2, 3})))); + } catch (Exception e) { + error = e; + } + assertNotNull(error); + assertTrue( + error + .getMessage() + .contains( + "The subtype for BsonBinaryData must be a value in the inclusive [0, 255] range.")); + } + + @Test + public void invalidBsonTimestampDataGetsRejected() throws Exception { + Exception error = null; + try { + waitFor(testDocumentOnNightly().set(map("key", FieldValue.bsonTimestamp(-1, 1)))); + } catch (Exception e) { + error = e; + } + assertNotNull(error); + assertTrue( + error + .getMessage() + .contains( + "The field 'seconds' value (-1) does not represent an unsigned 32-bit integer.")); + + try { + waitFor(testDocumentOnNightly().set(map("key", FieldValue.bsonTimestamp(4294967296L, 1)))); + } catch (Exception e) { + error = e; + } + assertNotNull(error); + assertTrue( + error + .getMessage() + .contains( + "The field 'seconds' value (4294967296) does not represent an unsigned 32-bit integer.")); + + try { + waitFor(testDocumentOnNightly().set(map("key", FieldValue.bsonTimestamp(1, -1)))); + } catch (Exception e) { + error = e; + } + 
assertNotNull(error); + assertTrue( + error + .getMessage() + .contains( + "The field 'increment' value (-1) does not represent an unsigned 32-bit integer.")); + + try { + waitFor(testDocumentOnNightly().set(map("key", FieldValue.bsonTimestamp(1, 4294967296L)))); + } catch (Exception e) { + error = e; + } + assertNotNull(error); + assertTrue( + error + .getMessage() + .contains( + "The field 'increment' value (4294967296) does not represent an unsigned 32-bit integer.")); + } + @Test public void testCanUseTypedAccessors() { - DocumentReference doc = testDoc(); + DocumentReference doc = testDocumentOnNightly(); Map data = map( "null", @@ -111,7 +311,27 @@ public void testCanUseTypedAccessors() { "timestamp", new Timestamp(100, 123000000), "reference", - doc); + doc, + "array", + Arrays.asList(1.0, "foo", map("nested", true), null), + "map", + map("key", true), + "vector", + FieldValue.vector(new double[] {1, 2, 3}), + "regex", + new RegexValue("^foo", "i"), + "int32", + new Int32Value(1), + "bsonTimestamp", + new BsonTimestamp(1, 2), + "bsonObjectId", + new BsonObjectId("507f191e810c19729de860ea"), + "bsonBinary", + BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}), + "minKey", + minKey(), + "maxKey", + maxKey()); waitFor(doc.set(data)); DocumentSnapshot snapshot = waitFor(doc.get()); @@ -132,11 +352,21 @@ public void testCanUseTypedAccessors() { assertEquals(timestamp.toDate(), snapshot.getDate("timestamp")); assertTrue(data.get("reference") instanceof DocumentReference); assertEquals(((DocumentReference) data.get("reference")).getPath(), doc.getPath()); + assertEquals(data.get("array"), snapshot.get("array")); + assertEquals(data.get("map"), snapshot.get("map")); + assertEquals(data.get("vector"), snapshot.getVectorValue("vector")); + assertEquals(data.get("regex"), snapshot.getRegexValue("regex")); + assertEquals(data.get("int32"), snapshot.getInt32Value("int32")); + assertEquals(data.get("bsonTimestamp"), snapshot.getBsonTimestamp("bsonTimestamp")); + assertEquals(data.get("bsonObjectId"), snapshot.getBsonObjectId("bsonObjectId")); + assertEquals(data.get("bsonBinary"), snapshot.getBsonBinaryData("bsonBinary")); + assertEquals(data.get("minKey"), snapshot.getMinKey("minKey")); + assertEquals(data.get("maxKey"), snapshot.getMaxKey("maxKey")); } @Test public void testTypeAccessorsCanReturnNull() { - DocumentReference doc = testDoc(); + DocumentReference doc = testDocumentOnNightly(); Map data = map(); waitFor(doc.set(data)); @@ -153,20 +383,93 @@ public void testTypeAccessorsCanReturnNull() { assertNull(snapshot.getDate("missing")); assertNull(snapshot.getTimestamp("missing")); assertNull(snapshot.getDocumentReference("missing")); + assertNull(snapshot.getVectorValue("missing")); + assertNull(snapshot.getRegexValue("missing")); + assertNull(snapshot.getInt32Value("missing")); + assertNull(snapshot.getBsonTimestamp("missing")); + assertNull(snapshot.getBsonObjectId("missing")); + assertNull(snapshot.getBsonBinaryData("missing")); + assertNull(snapshot.getMinKey("missing")); + assertNull(snapshot.getMaxKey("missing")); } @Test - public void testCanReadAndWriteDocumentReferences() { - DocumentReference docRef = testDoc(); - Map data = map("a", 42L, "ref", docRef); - verifySuccessfulWriteReadCycle(data, docRef); - } + public void snapshotListenerSortsDifferentTypesSameAsServer() throws Exception { + CollectionReference colRef = testCollectionOnNightly(); + // Document reference needs to be created first to make sure it is using the same firestore + // instance in creation + DocumentReference 
docRef = colRef.document("testDocRef"); - @Test - public void testCanReadAndWriteDocumentReferencesInLists() { - DocumentReference docRef = testDoc(); - List refs = Collections.singletonList(docRef); - Map data = map("a", 42L, "refs", refs); - verifySuccessfulWriteReadCycle(data, docRef); + Map> testDocs = + map( + "null", + map("value", null), + "min", + map("value", FieldValue.minKey()), + "boolean", + map("value", true), + "nan", + map("value", Double.NaN), + "int32", + map("value", new Int32Value(1)), + "double", + map("value", 1.0), + "int", + map("value", 1L), + "timestamp", + map("value", new Timestamp(100, 123000000)), + "bsonTimestamp", + map("value", new BsonTimestamp(1, 2)), + "string", + map("value", "a"), + "bytes", + map("value", blob(1, 2, 3)), + "bsonBinary", + map("value", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})), + "reference", + map("value", docRef), + "bsonObjectId", + map("value", new BsonObjectId("507f191e810c19729de860ea")), + "geoPoint", + map("value", new GeoPoint(1.23, 4.56)), + "regex", + map("value", new RegexValue("^foo", "i")), + "array", + map("value", Arrays.asList(1.0, "foo", map("key", true), null)), + "vector", + map("value", FieldValue.vector(new double[] {1, 2, 3})), + "map", + map("value", map("key", true)), + "max", + map("value", FieldValue.maxKey())); + + writeTestDocsOnCollection(colRef, testDocs); + + Query orderedQuery = colRef.orderBy("value"); + List expectedDocs = + Arrays.asList( + "null", + "min", + "boolean", + "nan", + "double", + "int", + "int32", + "timestamp", + "bsonTimestamp", + "string", + "bytes", + "bsonBinary", + "reference", + "bsonObjectId", + "geoPoint", + "regex", + "array", + "vector", + "map", + "max"); + + // Assert that get and snapshot listener requests sort docs in the same, expected order + assertSDKQueryResultsConsistentWithBackend(orderedQuery, testDocs, expectedDocs); } } diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/testutil/IntegrationTestUtil.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/testutil/IntegrationTestUtil.java index a7417d96563..2cf531ba094 100644 --- a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/testutil/IntegrationTestUtil.java +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/testutil/IntegrationTestUtil.java @@ -18,7 +18,9 @@ import static com.google.firebase.firestore.util.Util.autoId; import static java.util.Arrays.asList; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; import android.content.Context; import android.os.StrictMode; @@ -205,6 +207,36 @@ public static FirebaseFirestore testFirestore() { return testFirestore(newTestSettings()); } + public static FirebaseFirestore testFirestoreOnNightly() { + FirebaseFirestoreSettings settings = + new FirebaseFirestoreSettings.Builder() + .setHost("test-firestore.sandbox.googleapis.com") + .setSslEnabled(true) + .build(); + + DatabaseId databaseId = DatabaseId.forDatabase("firestore-sdk-nightly", "(default)"); + + String persistenceKey = "db" + firestoreStatus.size(); + + return testFirestore(databaseId, Level.DEBUG, settings, persistenceKey); + } + + public static CollectionReference testCollectionOnNightly() { + return testFirestoreOnNightly().collection(autoId()); + } + + public static DocumentReference testDocumentOnNightly() { + return testCollectionOnNightly().document(); + } + + public 
static CollectionReference testCollectionWithDocsOnNightly( + Map> docs) { + CollectionReference collection = testCollectionOnNightly(); + CollectionReference writer = testFirestoreOnNightly().collection(collection.getId()); + writeAllDocs(writer, docs); + return collection; + } + /** * Initializes a new Firestore instance that uses the default project, customized with the * provided settings. @@ -366,6 +398,12 @@ public static CollectionReference testCollectionWithDocs(Map> docs) { + CollectionReference writer = testFirestoreOnNightly().collection(collection.getId()); + writeAllDocs(writer, docs); + } + public static void writeAllDocs( CollectionReference collection, Map> docs) { WriteBatch writeBatch = null; @@ -537,4 +575,66 @@ public static void checkOnlineAndOfflineResultsMatch(Query query, String... expe assertEquals(expected, querySnapshotToIds(docsFromCache)); } } + + // Asserts that the given query produces the expected result for all of the + // following scenarios: + // 1. Performing the given query using source=server, compare with expected result and populate + // cache. + // 2. Performing the given query using source=cache, compare with server result and expected + // result. + // 3. Using a snapshot listener to raise snapshots from cache and server, compare them with + // expected result. + public static void assertSDKQueryResultsConsistentWithBackend( + Query query, Map> allData, List expectedDocIds) + throws Exception { + // Check the cache round trip first to make sure cache is properly populated, otherwise the + // snapshot listener below will return partial results from previous + // "assertSDKQueryResultsConsistentWithBackend" calls if it is called multiple times in one test + checkOnlineAndOfflineResultsMatch(query, expectedDocIds.toArray(new String[0])); + + EventAccumulator eventAccumulator = new EventAccumulator<>(); + ListenerRegistration registration = + query.addSnapshotListener(MetadataChanges.INCLUDE, eventAccumulator.listener()); + List watchSnapshots; + try { + watchSnapshots = eventAccumulator.await(2); + } finally { + registration.remove(); + } + assertTrue(watchSnapshots.get(0).getMetadata().isFromCache()); + verifySnapshot(watchSnapshots.get(0), allData, expectedDocIds); + assertFalse(watchSnapshots.get(1).getMetadata().isFromCache()); + verifySnapshot(watchSnapshots.get(1), allData, expectedDocIds); + } + + public static void verifySnapshot( + QuerySnapshot snapshot, + Map> allData, + List expectedDocIds) { + List snapshotDocIds = querySnapshotToIds(snapshot); + assertEquals( + String.format( + "Did not get the same document size. Expected doc size: %d, Actual doc size: %d ", + expectedDocIds.size(), snapshotDocIds.size()), + expectedDocIds.size(), + snapshotDocIds.size()); + assertTrue( + String.format( + "Did not get the expected document IDs. Expected doc IDs: %s, Actual doc IDs: %s ", + expectedDocIds, snapshotDocIds), + expectedDocIds.equals(snapshotDocIds)); + + Map actualDocs = toDataMap(snapshot); + + for (String docId : expectedDocIds) { + Map expectedDoc = allData.get(docId); + Map actualDoc = (Map) actualDocs.get(docId); + + assertTrue( + String.format( + "Did not get the expected document content. 
Expected doc: %s, Actual doc: %s ", + expectedDoc, actualDoc), + expectedDoc.equals(actualDoc)); + } + } } diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/BsonBinaryData.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/BsonBinaryData.java new file mode 100644 index 00000000000..19e64892012 --- /dev/null +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/BsonBinaryData.java @@ -0,0 +1,118 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.firebase.firestore; + +import com.google.protobuf.ByteString; +import java.util.Objects; +import javax.annotation.Nonnull; + +/** Represents a BSON Binary data type in Firestore documents. */ +public final class BsonBinaryData { + private final int subtype; + private final ByteString data; + + private BsonBinaryData(int subtype, @Nonnull ByteString data) { + // By definition the subtype should be 1 byte and should therefore + // have a value between 0 and 255 + if (subtype < 0 || subtype > 255) { + throw new IllegalArgumentException( + "The subtype for BsonBinaryData must be a value in the inclusive [0, 255] range."); + } + this.subtype = subtype; + this.data = data; + } + + /** + * Creates a new BsonBinaryData instance from the provided ByteString and subtype. + * + * @param subtype The subtype to use for this instance. + * @param byteString The byteString to use for this instance. + * @return The new BsonBinaryData instance + */ + @Nonnull + public static BsonBinaryData fromByteString(int subtype, @Nonnull ByteString byteString) { + return new BsonBinaryData(subtype, byteString); + } + + /** + * Creates a new BsonBinaryData instance from the provided bytes and subtype. Makes a copy of the + * bytes passed in. + * + * @param subtype The subtype to use for this instance. + * @param bytes The bytes to use for this instance. + * @return The new BsonBinaryData instance + */ + @Nonnull + public static BsonBinaryData fromBytes(int subtype, @Nonnull byte[] bytes) { + return new BsonBinaryData(subtype, ByteString.copyFrom(bytes)); + } + + /** + * Returns the underlying data as a ByteString. + * + * @return The data as a ByteString. + */ + @Nonnull + public ByteString dataAsByteString() { + return data; + } + + /** + * Returns a copy of the underlying data as a byte[] array. + * + * @return The data as a byte[] array. + */ + @Nonnull + public byte[] dataAsBytes() { + return data.toByteArray(); + } + + /** + * Returns the subtype of this binary data. + * + * @return The subtype of the binary data. + */ + public int subtype() { + return subtype; + } + + /** + * Returns true if this BsonBinaryData is equal to the provided object. + * + * @param obj The object to compare against. + * @return Whether this BsonBinaryData is equal to the provided object. 
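+ *     (that is, whether the provided object is a BsonBinaryData with the same subtype and the same data bytes).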
+ */ + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof BsonBinaryData)) { + return false; + } + BsonBinaryData other = (BsonBinaryData) obj; + return subtype == other.subtype && data.equals(other.data); + } + + @Override + public int hashCode() { + return Objects.hash(subtype, data); + } + + @Override + public String toString() { + return "BsonBinaryData{subtype=" + subtype + ", data=" + data + "}"; + } +} diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/BsonObjectId.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/BsonObjectId.java new file mode 100644 index 00000000000..2e7e12c3ad8 --- /dev/null +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/BsonObjectId.java @@ -0,0 +1,59 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.firebase.firestore; + +import androidx.annotation.NonNull; + +/** Represents a BSON ObjectId type in Firestore documents. */ +public final class BsonObjectId { + public final String value; + + /** + * Constructor that creates a new BSON ObjectId value with the given value. + * + * @param oid The 24-character hex string representing the ObjectId. + */ + public BsonObjectId(@NonNull String oid) { + this.value = oid; + } + + /** + * Returns true if this BsonObjectId is equal to the provided object. + * + * @param obj The object to compare against. + * @return Whether this BsonObjectId is equal to the provided object. + */ + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof BsonObjectId)) { + return false; + } + BsonObjectId other = (BsonObjectId) obj; + return value.equals(other.value); + } + + @Override + public int hashCode() { + return value.hashCode(); + } + + @Override + public String toString() { + return "BsonObjectId{value='" + value + "'}"; + } +} diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/BsonTimestamp.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/BsonTimestamp.java new file mode 100644 index 00000000000..394a46cab84 --- /dev/null +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/BsonTimestamp.java @@ -0,0 +1,72 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.firebase.firestore; + +/** Represents a BSON Timestamp type in Firestore documents. 
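+ * <p>A BSON timestamp consists of an unsigned 32-bit {@code seconds} value and an unsigned 32-bit
+ * {@code increment} value, both stored here as {@code long}.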
*/ +public final class BsonTimestamp { + public final long seconds; + public final long increment; + + /** + * Constructor that creates a new BSON Timestamp value with the given values. + * + * @param seconds An unsigned 32-bit integer value stored as long representing the seconds. + * @param increment An unsigned 32-bit integer value stored as long representing the increment. + */ + public BsonTimestamp(long seconds, long increment) { + if (seconds < 0 || seconds > 4294967295L) { + throw new IllegalArgumentException( + String.format( + "The field 'seconds' value (%s) does not represent an unsigned 32-bit integer.", + seconds)); + } + if (increment < 0 || increment > 4294967295L) { + throw new IllegalArgumentException( + String.format( + "The field 'increment' value (%s) does not represent an unsigned 32-bit integer.", + increment)); + } + this.seconds = seconds; + this.increment = increment; + } + + /** + * Returns true if this BsonTimestampValue is equal to the provided object. + * + * @param obj The object to compare against. + * @return Whether this BsonTimestampValue is equal to the provided object. + */ + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof BsonTimestamp)) { + return false; + } + BsonTimestamp other = (BsonTimestamp) obj; + return seconds == other.seconds && increment == other.increment; + } + + @Override + public int hashCode() { + return (int) (31 * seconds + increment); + } + + @Override + public String toString() { + return "BsonTimestampValue{seconds=" + seconds + ", increment=" + increment + "}"; + } +} diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/DocumentSnapshot.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/DocumentSnapshot.java index 4540608fc48..9fc60f674be 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/DocumentSnapshot.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/DocumentSnapshot.java @@ -497,6 +497,90 @@ public VectorValue getVectorValue(@NonNull String field) { return (VectorValue) get(field); } + /** + * Returns the value of the field as a MinKey. + * + * @param field The path to the field. + * @throws RuntimeException if the value is not a MinKey. + * @return The value of the field. + */ + @Nullable + public MinKey getMinKey(@NonNull String field) { + return (MinKey) get(field); + } + + /** + * Returns the value of the field as a MaxKey. + * + * @param field The path to the field. + * @throws RuntimeException if the value is not a MaxKey. + * @return The value of the field. + */ + @Nullable + public MaxKey getMaxKey(@NonNull String field) { + return (MaxKey) get(field); + } + + /** + * Returns the value of the field as a RegexValue. + * + * @param field The path to the field. + * @throws RuntimeException if the value is not a RegexValue. + * @return The value of the field. + */ + @Nullable + public RegexValue getRegexValue(@NonNull String field) { + return (RegexValue) get(field); + } + + /** + * Returns the value of the field as a 32-bit integer. + * + * @param field The path to the field. + * @throws RuntimeException if the value is not a Int32Value. + * @return The value of the field. + */ + @Nullable + public Int32Value getInt32Value(@NonNull String field) { + return (Int32Value) get(field); + } + + /** + * Returns the value of the field as a BsonObjectId. + * + * @param field The path to the field. + * @throws RuntimeException if the value is not a BsonObjectId. 
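+ *     (for example, a ClassCastException if the stored field value is of a different type).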
+ * @return The value of the field. + */ + @Nullable + public BsonObjectId getBsonObjectId(@NonNull String field) { + return (BsonObjectId) get(field); + } + + /** + * Returns the value of the field as a BsonTimestampValue. + * + * @param field The path to the field. + * @throws RuntimeException if the value is not a BsonTimestampValue. + * @return The value of the field. + */ + @Nullable + public BsonTimestamp getBsonTimestamp(@NonNull String field) { + return (BsonTimestamp) get(field); + } + + /** + * Returns the value of the field as a BsonBinaryData. + * + * @param field The path to the field. + * @throws RuntimeException if the value is not a BsonBinaryData. + * @return The value of the field. + */ + @Nullable + public BsonBinaryData getBsonBinaryData(@NonNull String field) { + return (BsonBinaryData) get(field); + } + @Nullable private T getTypedValue(String field, Class clazz) { checkNotNull(field, "Provided field must not be null."); diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/FieldValue.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/FieldValue.java index 48f67e50e12..6c62faf5ada 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/FieldValue.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/FieldValue.java @@ -193,4 +193,82 @@ public static FieldValue increment(double l) { public static VectorValue vector(@NonNull double[] values) { return new VectorValue(values); } + + /** + * Creates a new {@link RegexValue} constructed using the given pattern and options. + * + * @param pattern The pattern to use for the new regular expression. + * @param options The options to use for the new regular expression. + * @return A new {@link RegexValue} constructed using the given pattern and options. + */ + @NonNull + public static RegexValue regex(@NonNull String pattern, @NonNull String options) { + return new RegexValue(pattern, options); + } + + /** + * Creates a new {@link Int32Value} constructed using the given number. + * + * @param value The number to use for constructing the Int32Value object. + * @return A new {@link Int32Value} constructed using the number. + */ + @NonNull + public static Int32Value int32(int value) { + return new Int32Value(value); + } + + /** + * Creates a new {@link BsonTimestamp} constructed using the given values. + * + * @param seconds The seconds value to use for the new BSON Timestamp. + * @param increment The increment value to use for the new BSON Timestamp. + * @return A new {@link BsonTimestamp} constructed using the given values. + */ + @NonNull + public static BsonTimestamp bsonTimestamp(long seconds, long increment) { + return new BsonTimestamp(seconds, increment); + } + + /** + * Creates a new {@link BsonObjectId} constructed using the given value. + * + * @param oid The 24-character hex string representation of the ObjectId. + * @return A new {@link BsonObjectId} constructed using the given value. + */ + @NonNull + public static BsonObjectId bsonObjectId(@NonNull String oid) { + return new BsonObjectId(oid); + } + + /** + * Creates a new {@link BsonBinaryData} constructed using the given values. + * + * @param subtype The subtype for the data. + * @param data The binary data as a byte array. + * @return A new {@link BsonBinaryData} constructed using the given values. 
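+ * @throws IllegalArgumentException if the given subtype is outside the inclusive [0, 255] range.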
+ */ + @NonNull + public static BsonBinaryData bsonBinaryData(int subtype, @NonNull byte[] data) { + return BsonBinaryData.fromBytes(subtype, data); + } + + /** + * Returns a {@link MinKey} value. + * + * @return A {@link MinKey} object which is the same as all MinKey objects. + */ + @NonNull + public static MinKey minKey() { + return MinKey.instance(); + } + + /** + * Returns a {@link MaxKey} value. + * + * @return A {@link MaxKey} object which is the same as all MaxKey objects. + */ + @NonNull + public static MaxKey maxKey() { + return MaxKey.instance(); + } } diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/Int32Value.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/Int32Value.java new file mode 100644 index 00000000000..edcc47c3964 --- /dev/null +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/Int32Value.java @@ -0,0 +1,52 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.firebase.firestore; + +/** Represents a 32-bit integer type in Firestore documents. */ +public final class Int32Value { + public final int value; + + public Int32Value(int value) { + this.value = value; + } + + /** + * Returns true if this Int32Value is equal to the provided object. + * + * @param obj The object to compare against. + * @return Whether this Int32Value is equal to the provided object. + */ + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof Int32Value)) { + return false; + } + Int32Value other = (Int32Value) obj; + return value == other.value; + } + + @Override + public int hashCode() { + return value; + } + + @Override + public String toString() { + return "Int32Value{value=" + value + "}"; + } +} diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/MaxKey.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/MaxKey.java new file mode 100644 index 00000000000..4d43dae7f1d --- /dev/null +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/MaxKey.java @@ -0,0 +1,45 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.firebase.firestore; + +import androidx.annotation.NonNull; + +/** Represents the Firestore "Max Key" data type. 
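+ * <p>MaxKey is a singleton that sorts after values of every other type; obtain it via
+ * {@link FieldValue#maxKey()} or {@link MaxKey#instance()}.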
*/ +public final class MaxKey { + private static final MaxKey INSTANCE = new MaxKey(); + + private MaxKey() {} + + @NonNull + public static MaxKey instance() { + return INSTANCE; + } + + /** + * Returns true if this MaxKey is equal to the provided object. + * + * @param obj The object to compare against. + * @return Whether this MaxKey is equal to the provided object. + */ + @Override + public boolean equals(Object obj) { + return obj == INSTANCE; + } + + @Override + public int hashCode() { + return MaxKey.class.getName().hashCode(); + } +} diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/MinKey.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/MinKey.java new file mode 100644 index 00000000000..e815d1fd3e1 --- /dev/null +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/MinKey.java @@ -0,0 +1,45 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.firebase.firestore; + +import androidx.annotation.NonNull; + +/** Represents the Firestore "Min Key" data type. */ +public final class MinKey { + private static final MinKey INSTANCE = new MinKey(); + + private MinKey() {} + + @NonNull + public static MinKey instance() { + return INSTANCE; + } + + /** + * Returns true if this MinKey is equal to the provided object. + * + * @param obj The object to compare against. + * @return Whether this MinKey is equal to the provided object. + */ + @Override + public boolean equals(Object obj) { + return obj == INSTANCE; + } + + @Override + public int hashCode() { + return MinKey.class.getName().hashCode(); + } +} diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/RegexValue.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/RegexValue.java new file mode 100644 index 00000000000..1af0ce1f04d --- /dev/null +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/RegexValue.java @@ -0,0 +1,56 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.firebase.firestore; + +import androidx.annotation.NonNull; + +/** Represents a regular expression type in Firestore documents. */ +public final class RegexValue { + public final String pattern; + public final String options; + + public RegexValue(@NonNull String pattern, @NonNull String options) { + this.pattern = pattern; + this.options = options; + } + + /** + * Returns true if this RegexValue is equal to the provided object. 
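+ * Two RegexValue instances are equal when both their pattern and their options strings are equal.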
+ * + * @param obj The object to compare against. + * @return Whether this RegexValue is equal to the provided object. + */ + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof RegexValue)) { + return false; + } + RegexValue other = (RegexValue) obj; + return pattern.equals(other.pattern) && options.equals(other.options); + } + + @Override + public int hashCode() { + return 31 * pattern.hashCode() + options.hashCode(); + } + + @Override + public String toString() { + return "RegexValue{pattern='" + pattern + "', options='" + options + "'}"; + } +} diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataReader.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataReader.java index 297479d0262..347677bd563 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataReader.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataReader.java @@ -43,6 +43,7 @@ import com.google.firestore.v1.ArrayValue; import com.google.firestore.v1.MapValue; import com.google.firestore.v1.Value; +import com.google.protobuf.ByteString; import com.google.protobuf.NullValue; import com.google.type.LatLng; import java.util.ArrayList; @@ -443,6 +444,22 @@ private Value parseScalarValue(Object input, ParseContext context) { .build(); } else if (input instanceof VectorValue) { return parseVectorValue(((VectorValue) input), context); + + } else if (input instanceof MinKey) { + return parseMinKey(); + } else if (input instanceof MaxKey) { + return parseMaxKey(); + } else if (input instanceof BsonObjectId) { + return parseBsonObjectId((BsonObjectId) input); + } else if (input instanceof BsonTimestamp) { + return parseBsonTimestamp((BsonTimestamp) input); + } else if (input instanceof BsonBinaryData) { + return parseBsonBinary((BsonBinaryData) input); + } else if (input instanceof RegexValue) { + return parseRegexValue((RegexValue) input); + } else if (input instanceof Int32Value) { + return parseInteger32Value((Int32Value) input); + } else if (input.getClass().isArray()) { throw context.createError("Arrays are not supported; use a List instead"); } else { @@ -459,6 +476,80 @@ private Value parseVectorValue(VectorValue vector, ParseContext context) { return Value.newBuilder().setMapValue(mapBuilder).build(); } + private Value parseMinKey() { + MapValue.Builder mapBuilder = MapValue.newBuilder(); + mapBuilder.putFields( + Values.RESERVED_MIN_KEY, Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build()); + return Value.newBuilder().setMapValue(mapBuilder).build(); + } + + private Value parseMaxKey() { + MapValue.Builder mapBuilder = MapValue.newBuilder(); + mapBuilder.putFields( + Values.RESERVED_MAX_KEY, Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build()); + return Value.newBuilder().setMapValue(mapBuilder).build(); + } + + private Value parseBsonObjectId(BsonObjectId objectId) { + MapValue.Builder mapBuilder = MapValue.newBuilder(); + mapBuilder.putFields( + Values.RESERVED_OBJECT_ID_KEY, + Value.newBuilder().setStringValue((String) objectId.value).build()); + return Value.newBuilder().setMapValue(mapBuilder).build(); + } + + private Value parseBsonTimestamp(BsonTimestamp timestamp) { + MapValue.Builder innerMapBuilder = MapValue.newBuilder(); + innerMapBuilder.putFields( + Values.RESERVED_BSON_TIMESTAMP_SECONDS_KEY, + Value.newBuilder().setIntegerValue(timestamp.seconds).build()); + innerMapBuilder.putFields( + 
Values.RESERVED_BSON_TIMESTAMP_INCREMENT_KEY, + Value.newBuilder().setIntegerValue(timestamp.increment).build()); + + MapValue.Builder mapBuilder = MapValue.newBuilder(); + mapBuilder.putFields( + Values.RESERVED_BSON_TIMESTAMP_KEY, + Value.newBuilder().setMapValue(innerMapBuilder).build()); + + return Value.newBuilder().setMapValue(mapBuilder).build(); + } + + private Value parseBsonBinary(BsonBinaryData binary) { + MapValue.Builder mapBuilder = MapValue.newBuilder(); + mapBuilder.putFields( + Values.RESERVED_BSON_BINARY_KEY, + Value.newBuilder() + .setBytesValue( + ByteString.copyFrom(new byte[] {(byte) binary.subtype()}) + .concat(binary.dataAsByteString())) + .build()); + return Value.newBuilder().setMapValue(mapBuilder).build(); + } + + private Value parseRegexValue(RegexValue regex) { + MapValue.Builder innerMapBuilder = MapValue.newBuilder(); + innerMapBuilder.putFields( + Values.RESERVED_REGEX_PATTERN_KEY, + Value.newBuilder().setStringValue(regex.pattern).build()); + innerMapBuilder.putFields( + Values.RESERVED_REGEX_OPTIONS_KEY, + Value.newBuilder().setStringValue(regex.options).build()); + + MapValue.Builder mapBuilder = MapValue.newBuilder(); + mapBuilder.putFields( + Values.RESERVED_REGEX_KEY, Value.newBuilder().setMapValue(innerMapBuilder).build()); + + return Value.newBuilder().setMapValue(mapBuilder).build(); + } + + private Value parseInteger32Value(Int32Value int32) { + MapValue.Builder mapBuilder = MapValue.newBuilder(); + mapBuilder.putFields( + Values.RESERVED_INT32_KEY, Value.newBuilder().setIntegerValue(int32.value).build()); + return Value.newBuilder().setMapValue(mapBuilder).build(); + } + private Value parseTimestamp(Timestamp timestamp) { // Firestore backend truncates precision down to microseconds. To ensure offline mode works // the same with regards to truncation, perform the truncation immediately without waiting for diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataWriter.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataWriter.java index d6ac7b90bba..805dee56ae0 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataWriter.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataWriter.java @@ -19,17 +19,24 @@ import static com.google.firebase.firestore.model.Values.TYPE_ORDER_ARRAY; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_BLOB; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_BOOLEAN; +import static com.google.firebase.firestore.model.Values.TYPE_ORDER_BSON_BINARY; +import static com.google.firebase.firestore.model.Values.TYPE_ORDER_BSON_OBJECT_ID; +import static com.google.firebase.firestore.model.Values.TYPE_ORDER_BSON_TIMESTAMP; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_GEOPOINT; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_MAP; +import static com.google.firebase.firestore.model.Values.TYPE_ORDER_MAX_KEY; +import static com.google.firebase.firestore.model.Values.TYPE_ORDER_MIN_KEY; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_NULL; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_NUMBER; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_REFERENCE; +import static com.google.firebase.firestore.model.Values.TYPE_ORDER_REGEX; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_SERVER_TIMESTAMP; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_STRING; 
import static com.google.firebase.firestore.model.Values.TYPE_ORDER_TIMESTAMP; import static com.google.firebase.firestore.model.Values.TYPE_ORDER_VECTOR; import static com.google.firebase.firestore.model.Values.typeOrder; import static com.google.firebase.firestore.util.Assert.fail; +import static com.google.firestore.v1.Value.ValueTypeCase.MAP_VALUE; import androidx.annotation.RestrictTo; import com.google.firebase.Timestamp; @@ -39,6 +46,7 @@ import com.google.firebase.firestore.util.Logger; import com.google.firestore.v1.ArrayValue; import com.google.firestore.v1.Value; +import com.google.protobuf.ByteString; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -78,6 +86,9 @@ public Object convertValue(Value value) { case TYPE_ORDER_BOOLEAN: return value.getBooleanValue(); case TYPE_ORDER_NUMBER: + if (value.getValueTypeCase() == MAP_VALUE) { + return convertInt32(value.getMapValue().getFieldsMap()); + } return value.getValueTypeCase().equals(Value.ValueTypeCase.INTEGER_VALUE) ? (Object) value.getIntegerValue() // Cast to Object to prevent type coercion to double : (Object) value.getDoubleValue(); @@ -90,6 +101,19 @@ public Object convertValue(Value value) { value.getGeoPointValue().getLatitude(), value.getGeoPointValue().getLongitude()); case TYPE_ORDER_VECTOR: return convertVectorValue(value.getMapValue().getFieldsMap()); + case TYPE_ORDER_BSON_OBJECT_ID: + return convertBsonObjectId(value.getMapValue().getFieldsMap()); + case TYPE_ORDER_BSON_TIMESTAMP: + return convertBsonTimestamp(value.getMapValue().getFieldsMap()); + case TYPE_ORDER_BSON_BINARY: + return convertBsonBinary(value.getMapValue().getFieldsMap()); + case TYPE_ORDER_REGEX: + return convertRegex(value.getMapValue().getFieldsMap()); + case TYPE_ORDER_MAX_KEY: + return FieldValue.maxKey(); + case TYPE_ORDER_MIN_KEY: + return FieldValue.minKey(); + default: throw fail("Unknown value type: " + value.getValueTypeCase()); } @@ -115,6 +139,43 @@ VectorValue convertVectorValue(Map mapValue) { return new VectorValue(doubles); } + BsonObjectId convertBsonObjectId(Map mapValue) { + return new BsonObjectId(mapValue.get(Values.RESERVED_OBJECT_ID_KEY).getStringValue()); + } + + BsonTimestamp convertBsonTimestamp(Map mapValue) { + Map fields = + mapValue.get(Values.RESERVED_BSON_TIMESTAMP_KEY).getMapValue().getFieldsMap(); + return new BsonTimestamp( + fields.get(Values.RESERVED_BSON_TIMESTAMP_SECONDS_KEY).getIntegerValue(), + fields.get(Values.RESERVED_BSON_TIMESTAMP_INCREMENT_KEY).getIntegerValue()); + } + + BsonBinaryData convertBsonBinary(Map mapValue) { + ByteString bytes = mapValue.get(Values.RESERVED_BSON_BINARY_KEY).getBytesValue(); + // Note: A byte is interpreted as a signed 8-bit value. Since values larger than 127 have a + // leading '1' bit, simply casting them to integer results in sign-extension and lead to a + // negative integer value. For example, the byte `0x80` casted to `int` results in `-128`, + // rather than `128`, and the byte `0xFF` casted to `int` will be `-1` rather than `255`. + // Since we want the `subtype` to be an unsigned byte, we need to perform 0-extension (rather + // than sign-extension) to convert it to an int. 
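+ // Masking with 0xFF performs the zero-extension: for example, (byte) 0x80 & 0xFF evaluates to
+ // 128, and (byte) 0xFF & 0xFF evaluates to 255.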
+ int subtype = bytes.byteAt(0) & 0xFF; + return BsonBinaryData.fromByteString(subtype, bytes.substring(1)); + } + + RegexValue convertRegex(Map mapValue) { + Map fields = + mapValue.get(Values.RESERVED_REGEX_KEY).getMapValue().getFieldsMap(); + + return new RegexValue( + fields.get(Values.RESERVED_REGEX_PATTERN_KEY).getStringValue(), + fields.get(Values.RESERVED_REGEX_OPTIONS_KEY).getStringValue()); + } + + Int32Value convertInt32(Map mapValue) { + return new Int32Value((int) mapValue.get(Values.RESERVED_INT32_KEY).getIntegerValue()); + } + private Object convertServerTimestamp(Value serverTimestampValue) { switch (serverTimestampBehavior) { case PREVIOUS: diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/model/Values.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/model/Values.java index 834fb2454a3..79b715182e0 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/model/Values.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/model/Values.java @@ -15,7 +15,6 @@ package com.google.firebase.firestore.model; import static com.google.firebase.firestore.model.ServerTimestamps.getLocalWriteTime; -import static com.google.firebase.firestore.model.ServerTimestamps.isServerTimestamp; import static com.google.firebase.firestore.util.Assert.fail; import static com.google.firebase.firestore.util.Assert.hardAssert; @@ -38,18 +37,49 @@ public class Values { public static final String TYPE_KEY = "__type__"; + + public static final String RESERVED_VECTOR_KEY = "__vector__"; + // For MinKey type + public static final String RESERVED_MIN_KEY = "__min__"; + + // For MaxKey type + public static final String RESERVED_MAX_KEY = "__max__"; + + // For Regex type + public static final String RESERVED_REGEX_KEY = "__regex__"; + public static final String RESERVED_REGEX_PATTERN_KEY = "pattern"; + public static final String RESERVED_REGEX_OPTIONS_KEY = "options"; + + // For ObjectId type + public static final String RESERVED_OBJECT_ID_KEY = "__oid__"; + + // For Int32 type + public static final String RESERVED_INT32_KEY = "__int__"; + + // For RequestTimestamp + public static final String RESERVED_BSON_TIMESTAMP_KEY = "__request_timestamp__"; + + public static final String RESERVED_BSON_TIMESTAMP_SECONDS_KEY = "seconds"; + public static final String RESERVED_BSON_TIMESTAMP_INCREMENT_KEY = "increment"; + + // For BSON Binary Data + public static final String RESERVED_BSON_BINARY_KEY = "__binary__"; + + public static final String RESERVED_SERVER_TIMESTAMP_KEY = "server_timestamp"; + public static final Value NAN_VALUE = Value.newBuilder().setDoubleValue(Double.NaN).build(); public static final Value NULL_VALUE = Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build(); public static final Value MIN_VALUE = NULL_VALUE; - public static final Value MAX_VALUE_TYPE = Value.newBuilder().setStringValue("__max__").build(); + public static final Value MAX_VALUE_TYPE = + Value.newBuilder().setStringValue(RESERVED_MAX_KEY).build(); public static final Value MAX_VALUE = Value.newBuilder() .setMapValue(MapValue.newBuilder().putFields(TYPE_KEY, MAX_VALUE_TYPE)) .build(); public static final Value VECTOR_VALUE_TYPE = - Value.newBuilder().setStringValue("__vector__").build(); + Value.newBuilder().setStringValue(RESERVED_VECTOR_KEY).build(); public static final String VECTOR_MAP_VECTORS_KEY = "value"; private static final Value MIN_VECTOR_VALUE = Value.newBuilder() @@ -67,17 +97,23 @@ public class Values { */ public static final int 
TYPE_ORDER_NULL = 0; - public static final int TYPE_ORDER_BOOLEAN = 1; - public static final int TYPE_ORDER_NUMBER = 2; - public static final int TYPE_ORDER_TIMESTAMP = 3; - public static final int TYPE_ORDER_SERVER_TIMESTAMP = 4; - public static final int TYPE_ORDER_STRING = 5; - public static final int TYPE_ORDER_BLOB = 6; - public static final int TYPE_ORDER_REFERENCE = 7; - public static final int TYPE_ORDER_GEOPOINT = 8; - public static final int TYPE_ORDER_ARRAY = 9; - public static final int TYPE_ORDER_VECTOR = 10; - public static final int TYPE_ORDER_MAP = 11; + public static final int TYPE_ORDER_MIN_KEY = 1; + public static final int TYPE_ORDER_BOOLEAN = 2; + public static final int TYPE_ORDER_NUMBER = 3; + public static final int TYPE_ORDER_TIMESTAMP = 4; + public static final int TYPE_ORDER_BSON_TIMESTAMP = 5; + public static final int TYPE_ORDER_SERVER_TIMESTAMP = 6; + public static final int TYPE_ORDER_STRING = 7; + public static final int TYPE_ORDER_BLOB = 8; + public static final int TYPE_ORDER_BSON_BINARY = 9; + public static final int TYPE_ORDER_REFERENCE = 10; + public static final int TYPE_ORDER_BSON_OBJECT_ID = 11; + public static final int TYPE_ORDER_GEOPOINT = 12; + public static final int TYPE_ORDER_REGEX = 13; + public static final int TYPE_ORDER_ARRAY = 14; + public static final int TYPE_ORDER_VECTOR = 15; + public static final int TYPE_ORDER_MAP = 16; + public static final int TYPE_ORDER_MAX_KEY = 17; public static final int TYPE_ORDER_MAX_VALUE = Integer.MAX_VALUE; @@ -89,7 +125,6 @@ public static int typeOrder(Value value) { case BOOLEAN_VALUE: return TYPE_ORDER_BOOLEAN; case INTEGER_VALUE: - return TYPE_ORDER_NUMBER; case DOUBLE_VALUE: return TYPE_ORDER_NUMBER; case TIMESTAMP_VALUE: @@ -105,12 +140,27 @@ public static int typeOrder(Value value) { case ARRAY_VALUE: return TYPE_ORDER_ARRAY; case MAP_VALUE: - if (isServerTimestamp(value)) { + MapRepresentation mapType = detectMapRepresentation(value); + if (mapType.equals(MapRepresentation.SERVER_TIMESTAMP)) { return TYPE_ORDER_SERVER_TIMESTAMP; - } else if (isMaxValue(value)) { + } else if (mapType.equals(MapRepresentation.INTERNAL_MAX)) { return TYPE_ORDER_MAX_VALUE; - } else if (isVectorValue(value)) { + } else if (mapType.equals(MapRepresentation.VECTOR)) { return TYPE_ORDER_VECTOR; + } else if (mapType.equals(MapRepresentation.MIN_KEY)) { + return TYPE_ORDER_MIN_KEY; + } else if (mapType.equals(MapRepresentation.MAX_KEY)) { + return TYPE_ORDER_MAX_KEY; + } else if (mapType.equals(MapRepresentation.REGEX)) { + return TYPE_ORDER_REGEX; + } else if (mapType.equals(MapRepresentation.BSON_TIMESTAMP)) { + return TYPE_ORDER_BSON_TIMESTAMP; + } else if (mapType.equals(MapRepresentation.BSON_OBJECT_ID)) { + return TYPE_ORDER_BSON_OBJECT_ID; + } else if (mapType.equals(MapRepresentation.BSON_BINARY)) { + return TYPE_ORDER_BSON_BINARY; + } else if (mapType.equals(MapRepresentation.INT32)) { + return TYPE_ORDER_NUMBER; } else { return TYPE_ORDER_MAP; } @@ -145,6 +195,9 @@ public static boolean equals(Value left, Value right) { case TYPE_ORDER_SERVER_TIMESTAMP: return getLocalWriteTime(left).equals(getLocalWriteTime(right)); case TYPE_ORDER_MAX_VALUE: + case TYPE_ORDER_NULL: + case TYPE_ORDER_MAX_KEY: + case TYPE_ORDER_MIN_KEY: return true; default: return left.equals(right); @@ -154,16 +207,33 @@ public static boolean equals(Value left, Value right) { private static boolean numberEquals(Value left, Value right) { if (left.getValueTypeCase() == Value.ValueTypeCase.INTEGER_VALUE && right.getValueTypeCase() == 
Value.ValueTypeCase.INTEGER_VALUE) { - return left.getIntegerValue() == right.getIntegerValue(); + return getIntegerValue(left) == getIntegerValue(right); } else if (left.getValueTypeCase() == Value.ValueTypeCase.DOUBLE_VALUE && right.getValueTypeCase() == Value.ValueTypeCase.DOUBLE_VALUE) { return Double.doubleToLongBits(left.getDoubleValue()) == Double.doubleToLongBits(right.getDoubleValue()); + } else if (detectMapRepresentation(left).equals(MapRepresentation.INT32) + && detectMapRepresentation(right).equals(MapRepresentation.INT32)) { + return getIntegerValue(left) == getIntegerValue(right); } return false; } + /** + * Returns a long from a 32-bit or 64-bit proto integer value. Throws an exception if the value is + * not an integer. + */ + private static long getIntegerValue(Value value) { + if (value.hasIntegerValue()) { + return value.getIntegerValue(); + } + if (value.hasMapValue() && value.getMapValue().getFieldsMap().containsKey(RESERVED_INT32_KEY)) { + return value.getMapValue().getFieldsMap().get(RESERVED_INT32_KEY).getIntegerValue(); + } + throw new IllegalArgumentException("getIntegerValue was called with a non-integer argument"); + } + private static boolean arrayEquals(Value left, Value right) { ArrayValue leftArray = left.getArrayValue(); ArrayValue rightArray = right.getArrayValue(); @@ -220,6 +290,8 @@ public static int compare(Value left, Value right) { switch (leftType) { case TYPE_ORDER_NULL: case TYPE_ORDER_MAX_VALUE: + case TYPE_ORDER_MAX_KEY: + case TYPE_ORDER_MIN_KEY: return 0; case TYPE_ORDER_BOOLEAN: return Util.compareBooleans(left.getBooleanValue(), right.getBooleanValue()); @@ -243,6 +315,14 @@ public static int compare(Value left, Value right) { return compareMaps(left.getMapValue(), right.getMapValue()); case TYPE_ORDER_VECTOR: return compareVectors(left.getMapValue(), right.getMapValue()); + case TYPE_ORDER_REGEX: + return compareRegex(left.getMapValue(), right.getMapValue()); + case TYPE_ORDER_BSON_OBJECT_ID: + return compareBsonObjectId(left.getMapValue(), right.getMapValue()); + case TYPE_ORDER_BSON_TIMESTAMP: + return compareBsonTimestamp(left.getMapValue(), right.getMapValue()); + case TYPE_ORDER_BSON_BINARY: + return compareBsonBinary(left.getMapValue(), right.getMapValue()); default: throw fail("Invalid value type: " + leftType); } @@ -284,15 +364,22 @@ private static int compareNumbers(Value left, Value right) { if (left.getValueTypeCase() == Value.ValueTypeCase.DOUBLE_VALUE) { double leftDouble = left.getDoubleValue(); if (right.getValueTypeCase() == Value.ValueTypeCase.DOUBLE_VALUE) { + // left and right are both doubles. return Util.compareDoubles(leftDouble, right.getDoubleValue()); - } else if (right.getValueTypeCase() == Value.ValueTypeCase.INTEGER_VALUE) { - return Util.compareMixed(leftDouble, right.getIntegerValue()); + } else if (right.getValueTypeCase() == Value.ValueTypeCase.INTEGER_VALUE + || detectMapRepresentation(right) == MapRepresentation.INT32) { + // left is a double and right is a 32/64-bit integer. 
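+ // For example, comparing a double 1.5 against an int32 value of 2 compares 1.5 with the long 2.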
+ return Util.compareMixed(leftDouble, getIntegerValue(right)); } - } else if (left.getValueTypeCase() == Value.ValueTypeCase.INTEGER_VALUE) { - long leftLong = left.getIntegerValue(); - if (right.getValueTypeCase() == Value.ValueTypeCase.INTEGER_VALUE) { - return Util.compareLongs(leftLong, right.getIntegerValue()); + } else if (left.getValueTypeCase() == Value.ValueTypeCase.INTEGER_VALUE + || detectMapRepresentation(left) == MapRepresentation.INT32) { + long leftLong = getIntegerValue(left); + if (right.getValueTypeCase() == Value.ValueTypeCase.INTEGER_VALUE + || detectMapRepresentation(right) == MapRepresentation.INT32) { + // left and right both a 32/64-bit integer. + return Util.compareLongs(leftLong, getIntegerValue(right)); } else if (right.getValueTypeCase() == Value.ValueTypeCase.DOUBLE_VALUE) { + // left is a 32/64-bit integer and right is a double . return -1 * Util.compareMixed(right.getDoubleValue(), leftLong); } } @@ -363,6 +450,54 @@ private static int compareMaps(MapValue left, MapValue right) { return Util.compareBooleans(iterator1.hasNext(), iterator2.hasNext()); } + private static int compareRegex(MapValue left, MapValue right) { + Map leftMap = + left.getFieldsMap().get(RESERVED_REGEX_KEY).getMapValue().getFieldsMap(); + Map rightMap = + right.getFieldsMap().get(RESERVED_REGEX_KEY).getMapValue().getFieldsMap(); + + String leftPattern = leftMap.get(RESERVED_REGEX_PATTERN_KEY).getStringValue(); + String rightPattern = rightMap.get(RESERVED_REGEX_PATTERN_KEY).getStringValue(); + + int comp = Util.compareUtf8Strings(leftPattern, rightPattern); + if (comp != 0) return comp; + + String leftOption = leftMap.get(RESERVED_REGEX_OPTIONS_KEY).getStringValue(); + String rightOption = rightMap.get(RESERVED_REGEX_OPTIONS_KEY).getStringValue(); + + return leftOption.compareTo(rightOption); + } + + private static int compareBsonObjectId(MapValue left, MapValue right) { + String lhs = left.getFieldsMap().get(RESERVED_OBJECT_ID_KEY).getStringValue(); + String rhs = right.getFieldsMap().get(RESERVED_OBJECT_ID_KEY).getStringValue(); + return Util.compareUtf8Strings(lhs, rhs); + } + + private static int compareBsonTimestamp(MapValue left, MapValue right) { + Map leftMap = + left.getFieldsMap().get(RESERVED_BSON_TIMESTAMP_KEY).getMapValue().getFieldsMap(); + Map rightMap = + right.getFieldsMap().get(RESERVED_BSON_TIMESTAMP_KEY).getMapValue().getFieldsMap(); + + long leftSeconds = leftMap.get(RESERVED_BSON_TIMESTAMP_SECONDS_KEY).getIntegerValue(); + long rightSeconds = rightMap.get(RESERVED_BSON_TIMESTAMP_SECONDS_KEY).getIntegerValue(); + + int comp = Util.compareLongs(leftSeconds, rightSeconds); + if (comp != 0) return comp; + + long leftIncrement = leftMap.get(RESERVED_BSON_TIMESTAMP_INCREMENT_KEY).getIntegerValue(); + long rightIncrement = rightMap.get(RESERVED_BSON_TIMESTAMP_INCREMENT_KEY).getIntegerValue(); + + return Util.compareLongs(leftIncrement, rightIncrement); + } + + private static int compareBsonBinary(MapValue left, MapValue right) { + ByteString lhs = left.getFieldsMap().get(RESERVED_BSON_BINARY_KEY).getBytesValue(); + ByteString rhs = right.getFieldsMap().get(RESERVED_BSON_BINARY_KEY).getBytesValue(); + return Util.compareByteStrings(lhs, rhs); + } + private static int compareVectors(MapValue left, MapValue right) { Map leftMap = left.getFieldsMap(); Map rightMap = right.getFieldsMap(); @@ -396,7 +531,7 @@ private static void canonifyValue(StringBuilder builder, Value value) { builder.append(value.getBooleanValue()); break; case INTEGER_VALUE: - 
builder.append(value.getIntegerValue()); + builder.append(getIntegerValue(value)); break; case DOUBLE_VALUE: builder.append(value.getDoubleValue()); @@ -612,4 +747,74 @@ public static boolean isMaxValue(Value value) { public static boolean isVectorValue(Value value) { return VECTOR_VALUE_TYPE.equals(value.getMapValue().getFieldsMap().get(TYPE_KEY)); } + + public enum MapRepresentation { + REGEX, + BSON_OBJECT_ID, + INT32, + BSON_TIMESTAMP, + BSON_BINARY, + MIN_KEY, + MAX_KEY, + INTERNAL_MAX, + VECTOR, + SERVER_TIMESTAMP, + REGULAR_MAP + } + + public static MapRepresentation detectMapRepresentation(Value value) { + if (value == null + || value.getMapValue() == null + || value.getMapValue().getFieldsMap() == null) { + return MapRepresentation.REGULAR_MAP; + } + + Map fields = value.getMapValue().getFieldsMap(); + + // Check for type-based mappings + if (fields.containsKey(TYPE_KEY)) { + String typeString = fields.get(TYPE_KEY).getStringValue(); + + if (typeString.equals(RESERVED_VECTOR_KEY)) { + return MapRepresentation.VECTOR; + } + if (typeString.equals(RESERVED_MAX_KEY)) { + return MapRepresentation.INTERNAL_MAX; + } + if (typeString.equals(RESERVED_SERVER_TIMESTAMP_KEY)) { + return MapRepresentation.SERVER_TIMESTAMP; + } + } + + if (fields.size() != 1) { + // All BSON types have 1 key in the map. To improve performance, we can + // return early if the number of keys in the map is not 1. + return MapRepresentation.REGULAR_MAP; + } + + // Check for BSON-related mappings + if (fields.containsKey(RESERVED_REGEX_KEY)) { + return MapRepresentation.REGEX; + } + if (fields.containsKey(RESERVED_OBJECT_ID_KEY)) { + return MapRepresentation.BSON_OBJECT_ID; + } + if (fields.containsKey(RESERVED_INT32_KEY)) { + return MapRepresentation.INT32; + } + if (fields.containsKey(RESERVED_BSON_TIMESTAMP_KEY)) { + return MapRepresentation.BSON_TIMESTAMP; + } + if (fields.containsKey(RESERVED_BSON_BINARY_KEY)) { + return MapRepresentation.BSON_BINARY; + } + if (fields.containsKey(RESERVED_MIN_KEY)) { + return MapRepresentation.MIN_KEY; + } + if (fields.containsKey(RESERVED_MAX_KEY)) { + return MapRepresentation.MAX_KEY; + } + + return MapRepresentation.REGULAR_MAP; + } } diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/util/CustomClassMapper.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/util/CustomClassMapper.java index 6e0df1e6d4a..074e6ef6a25 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/util/CustomClassMapper.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/util/CustomClassMapper.java @@ -20,13 +20,20 @@ import android.net.Uri; import com.google.firebase.Timestamp; import com.google.firebase.firestore.Blob; +import com.google.firebase.firestore.BsonBinaryData; +import com.google.firebase.firestore.BsonObjectId; +import com.google.firebase.firestore.BsonTimestamp; import com.google.firebase.firestore.DocumentId; import com.google.firebase.firestore.DocumentReference; import com.google.firebase.firestore.Exclude; import com.google.firebase.firestore.FieldValue; import com.google.firebase.firestore.GeoPoint; import com.google.firebase.firestore.IgnoreExtraProperties; +import com.google.firebase.firestore.Int32Value; +import com.google.firebase.firestore.MaxKey; +import com.google.firebase.firestore.MinKey; import com.google.firebase.firestore.PropertyName; +import com.google.firebase.firestore.RegexValue; import com.google.firebase.firestore.ServerTimestamp; import 
com.google.firebase.firestore.ThrowOnExtraProperties; import com.google.firebase.firestore.VectorValue; @@ -175,7 +182,14 @@ private static Object serialize(T o, ErrorPath path) { || o instanceof Blob || o instanceof DocumentReference || o instanceof FieldValue - || o instanceof VectorValue) { + || o instanceof VectorValue + || o instanceof MinKey + || o instanceof MaxKey + || o instanceof RegexValue + || o instanceof Int32Value + || o instanceof BsonTimestamp + || o instanceof BsonObjectId + || o instanceof BsonBinaryData) { return o; } else if (o instanceof Uri || o instanceof URI || o instanceof URL) { return o.toString(); @@ -245,6 +259,20 @@ private static T deserializeToClass(Object o, Class clazz, DeserializeCon return (T) convertDocumentReference(o, context); } else if (VectorValue.class.isAssignableFrom(clazz)) { return (T) convertVectorValue(o, context); + } else if (Int32Value.class.isAssignableFrom(clazz)) { + return (T) convertInt32Value(o, context); + } else if (BsonTimestamp.class.isAssignableFrom(clazz)) { + return (T) convertBsonTimestamp(o, context); + } else if (BsonObjectId.class.isAssignableFrom(clazz)) { + return (T) convertBsonObjectId(o, context); + } else if (BsonBinaryData.class.isAssignableFrom(clazz)) { + return (T) convertBsonBinaryData(o, context); + } else if (MinKey.class.isAssignableFrom(clazz)) { + return (T) convertMinKey(o, context); + } else if (MaxKey.class.isAssignableFrom(clazz)) { + return (T) convertMaxKey(o, context); + } else if (RegexValue.class.isAssignableFrom(clazz)) { + return (T) convertRegexValue(o, context); } else if (clazz.isArray()) { throw deserializeError( context.errorPath, "Converting to Arrays is not supported, please use Lists instead"); @@ -542,6 +570,77 @@ private static VectorValue convertVectorValue(Object o, DeserializeContext conte } } + private static Int32Value convertInt32Value(Object o, DeserializeContext context) { + if (o instanceof Int32Value) { + return (Int32Value) o; + } else { + throw deserializeError( + context.errorPath, + "Failed to convert value of type " + o.getClass().getName() + " to Int32Value"); + } + } + + private static BsonTimestamp convertBsonTimestamp(Object o, DeserializeContext context) { + if (o instanceof BsonTimestamp) { + return (BsonTimestamp) o; + } else { + throw deserializeError( + context.errorPath, + "Failed to convert value of type " + o.getClass().getName() + " to BsonTimestamp"); + } + } + + private static BsonObjectId convertBsonObjectId(Object o, DeserializeContext context) { + if (o instanceof BsonObjectId) { + return (BsonObjectId) o; + } else { + throw deserializeError( + context.errorPath, + "Failed to convert value of type " + o.getClass().getName() + " to BsonObjectId"); + } + } + + private static BsonBinaryData convertBsonBinaryData(Object o, DeserializeContext context) { + + if (o instanceof BsonBinaryData) { + return (BsonBinaryData) o; + } else { + throw deserializeError( + context.errorPath, + "Failed to convert value of type " + o.getClass().getName() + " to BsonBinaryData"); + } + } + + private static RegexValue convertRegexValue(Object o, DeserializeContext context) { + if (o instanceof RegexValue) { + return (RegexValue) o; + } else { + throw deserializeError( + context.errorPath, + "Failed to convert value of type " + o.getClass().getName() + " to RegexValue"); + } + } + + private static MinKey convertMinKey(Object o, DeserializeContext context) { + if (o instanceof MinKey) { + return (MinKey) o; + } else { + throw deserializeError( + context.errorPath, + "Failed to 
convert value of type " + o.getClass().getName() + " to MinKey"); + } + } + + private static MaxKey convertMaxKey(Object o, DeserializeContext context) { + if (o instanceof MaxKey) { + return (MaxKey) o; + } else { + throw deserializeError( + context.errorPath, + "Failed to convert value of type " + o.getClass().getName() + " to MaxKey"); + } + } + private static DocumentReference convertDocumentReference(Object o, DeserializeContext context) { if (o instanceof DocumentReference) { return (DocumentReference) o; diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/BsonTypesTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/BsonTypesTest.java new file mode 100644 index 00000000000..b1df92b4082 --- /dev/null +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/BsonTypesTest.java @@ -0,0 +1,151 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.firebase.firestore; + +import static com.google.firebase.firestore.testutil.Assert.assertThrows; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; + +import org.junit.Test; +import org.junit.runner.RunWith; +import org.robolectric.RobolectricTestRunner; +import org.robolectric.annotation.Config; + +@RunWith(RobolectricTestRunner.class) +@Config(manifest = Config.NONE) +public class BsonTypesTest { + + @Test + public void testBsonObjectIdEquality() { + BsonObjectId bsonObjectId = new BsonObjectId("507f191e810c19729de860ea"); + BsonObjectId bsonObjectIdDup = FieldValue.bsonObjectId("507f191e810c19729de860ea"); + BsonObjectId differentObjectId = new BsonObjectId("507f191e810c19729de860eb"); + + assertEquals(bsonObjectId, bsonObjectIdDup); + assertNotEquals(bsonObjectId, differentObjectId); + assertNotEquals(bsonObjectIdDup, differentObjectId); + + assertEquals(bsonObjectId.hashCode(), bsonObjectIdDup.hashCode()); + assertNotEquals(bsonObjectId.hashCode(), differentObjectId.hashCode()); + assertNotEquals(bsonObjectIdDup.hashCode(), differentObjectId.hashCode()); + } + + @Test + public void testBsonTimeStampEquality() { + BsonTimestamp bsonTimestamp = new BsonTimestamp(1, 2); + BsonTimestamp bsonTimestampDup = FieldValue.bsonTimestamp(1, 2); + BsonTimestamp differentSecondsTimestamp = new BsonTimestamp(2, 2); + BsonTimestamp differentIncrementTimestamp = new BsonTimestamp(1, 3); + + assertEquals(bsonTimestamp, bsonTimestampDup); + assertNotEquals(bsonTimestamp, differentSecondsTimestamp); + assertNotEquals(bsonTimestamp, differentIncrementTimestamp); + assertNotEquals(bsonTimestampDup, differentSecondsTimestamp); + assertNotEquals(bsonTimestampDup, differentIncrementTimestamp); + + assertEquals(bsonTimestamp.hashCode(), bsonTimestampDup.hashCode()); + assertNotEquals(bsonTimestamp.hashCode(), differentSecondsTimestamp.hashCode()); + assertNotEquals(bsonTimestamp.hashCode(), differentIncrementTimestamp.hashCode()); + assertNotEquals(bsonTimestampDup.hashCode(), 
differentSecondsTimestamp.hashCode()); + assertNotEquals(bsonTimestampDup.hashCode(), differentIncrementTimestamp.hashCode()); + } + + @Test + public void testBsonBinaryDataEquality() { + BsonBinaryData bsonBinaryData = BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}); + BsonBinaryData bsonBinaryDataDup = FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}); + BsonBinaryData differentSubtypeBinaryData = BsonBinaryData.fromBytes(2, new byte[] {1, 2, 3}); + BsonBinaryData differentDataBinaryData = BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4}); + + assertEquals(bsonBinaryData, bsonBinaryDataDup); + assertNotEquals(bsonBinaryData, differentSubtypeBinaryData); + assertNotEquals(bsonBinaryData, differentDataBinaryData); + assertNotEquals(bsonBinaryDataDup, differentSubtypeBinaryData); + assertNotEquals(bsonBinaryDataDup, differentDataBinaryData); + + assertEquals(bsonBinaryData.hashCode(), bsonBinaryDataDup.hashCode()); + assertNotEquals(bsonBinaryData.hashCode(), differentSubtypeBinaryData.hashCode()); + assertNotEquals(bsonBinaryData.hashCode(), differentDataBinaryData.hashCode()); + assertNotEquals(bsonBinaryDataDup.hashCode(), differentSubtypeBinaryData.hashCode()); + assertNotEquals(bsonBinaryDataDup.hashCode(), differentDataBinaryData.hashCode()); + } + + @Test + public void testRegexEquality() { + RegexValue regex = new RegexValue("^foo", "i"); + RegexValue regexDup = FieldValue.regex("^foo", "i"); + RegexValue differentPatternRegex = new RegexValue("^bar", "i"); + RegexValue differentOptionsRegex = new RegexValue("^foo", "m"); + + assertEquals(regex, regexDup); + assertNotEquals(regex, differentPatternRegex); + assertNotEquals(regex, differentOptionsRegex); + assertNotEquals(regexDup, differentPatternRegex); + assertNotEquals(regexDup, differentOptionsRegex); + + assertEquals(regex.hashCode(), regexDup.hashCode()); + assertNotEquals(regex.hashCode(), differentPatternRegex.hashCode()); + assertNotEquals(regex.hashCode(), differentOptionsRegex.hashCode()); + assertNotEquals(regexDup.hashCode(), differentPatternRegex.hashCode()); + assertNotEquals(regexDup.hashCode(), differentOptionsRegex.hashCode()); + } + + @Test + public void testInt32Equality() { + Int32Value int32 = new Int32Value(1); + Int32Value int32Dup = FieldValue.int32(1); + Int32Value differentInt32 = new Int32Value(2); + + assertEquals(int32, int32Dup); + assertNotEquals(int32, differentInt32); + assertNotEquals(int32Dup, differentInt32); + + assertEquals(int32.hashCode(), int32Dup.hashCode()); + assertNotEquals(int32.hashCode(), differentInt32.hashCode()); + assertNotEquals(int32Dup.hashCode(), differentInt32.hashCode()); + } + + @Test + public void testMaxKeyIsSingleton() { + MaxKey maxKey = FieldValue.maxKey(); + MaxKey maxKeyDup = MaxKey.instance(); + assertEquals(maxKey, maxKeyDup); + assertEquals(maxKey.hashCode(), maxKeyDup.hashCode()); + } + + @Test + public void testMinKeyIsSingleton() { + MinKey minKey = FieldValue.minKey(); + MinKey minKeyDup = MinKey.instance(); + assertEquals(minKey, minKeyDup); + assertEquals(minKey.hashCode(), minKeyDup.hashCode()); + } + + @Test + public void testMinKeyMaxKeyNullNotEqual() { + MinKey minKey = FieldValue.minKey(); + MaxKey maxKey = FieldValue.maxKey(); + assertNotEquals(minKey, maxKey); + assertNotEquals(minKey, null); + assertNotEquals(maxKey, null); + assertNotEquals(minKey.hashCode(), maxKey.hashCode()); + } + + @Test + public void testThrows() { + assertThrows( + IllegalArgumentException.class, () -> BsonBinaryData.fromBytes(256, new byte[] {1})); + } +} diff --git 
a/firebase-firestore/src/test/java/com/google/firebase/firestore/FieldValueTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/FieldValueTest.java index 7540c06d2e5..58ed3a9f080 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/FieldValueTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/FieldValueTest.java @@ -32,6 +32,21 @@ public void testEquals() { FieldValue deleteDup = FieldValue.delete(); FieldValue serverTimestamp = FieldValue.serverTimestamp(); FieldValue serverTimestampDup = FieldValue.serverTimestamp(); + RegexValue regex = FieldValue.regex("pattern", "options"); + RegexValue regexDup = FieldValue.regex("pattern", "options"); + Int32Value int32 = FieldValue.int32(1); + Int32Value int32Dup = FieldValue.int32(1); + BsonTimestamp bsonTimestamp = FieldValue.bsonTimestamp(1, 2); + BsonTimestamp bsonTimestampDup = FieldValue.bsonTimestamp(1, 2); + BsonObjectId bsonObjectId = FieldValue.bsonObjectId("507f191e810c19729de860ea"); + BsonObjectId bsonObjectIdDup = FieldValue.bsonObjectId("507f191e810c19729de860ea"); + BsonBinaryData bsonBinary = FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}); + BsonBinaryData bsonBinaryDup = FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}); + MinKey minKey = FieldValue.minKey(); + MinKey minKeyDup = FieldValue.minKey(); + MaxKey maxKey = FieldValue.maxKey(); + MaxKey maxKeyDup = FieldValue.maxKey(); + assertEquals(delete, deleteDup); assertEquals(serverTimestamp, serverTimestampDup); assertNotEquals(delete, serverTimestamp); @@ -39,5 +54,77 @@ public void testEquals() { assertEquals(delete.hashCode(), deleteDup.hashCode()); assertEquals(serverTimestamp.hashCode(), serverTimestampDup.hashCode()); assertNotEquals(delete.hashCode(), serverTimestamp.hashCode()); + + // BSON types + assertEquals(regex, regexDup); + assertEquals(int32, int32Dup); + assertEquals(bsonTimestamp, bsonTimestampDup); + assertEquals(bsonObjectId, bsonObjectIdDup); + assertEquals(bsonBinary, bsonBinaryDup); + assertEquals(minKey, minKeyDup); + assertEquals(maxKey, maxKeyDup); + assertNotEquals(delete, serverTimestamp); + + // BSON types are not equal to each other + assertNotEquals(regex, int32); + assertNotEquals(regex, bsonTimestamp); + assertNotEquals(regex, bsonObjectId); + assertNotEquals(regex, bsonBinary); + assertNotEquals(regex, minKey); + assertNotEquals(regex, maxKey); + + assertNotEquals(int32, bsonTimestamp); + assertNotEquals(int32, bsonObjectId); + assertNotEquals(int32, bsonBinary); + assertNotEquals(int32, minKey); + assertNotEquals(int32, maxKey); + + assertNotEquals(bsonTimestamp, bsonObjectId); + assertNotEquals(bsonTimestamp, bsonBinary); + assertNotEquals(bsonTimestamp, minKey); + assertNotEquals(bsonTimestamp, maxKey); + + assertNotEquals(bsonObjectId, bsonBinary); + assertNotEquals(bsonObjectId, minKey); + assertNotEquals(bsonObjectId, maxKey); + + assertNotEquals(minKey, maxKey); + + // BSON types hash codes + assertEquals(regex.hashCode(), regexDup.hashCode()); + assertEquals(int32.hashCode(), int32Dup.hashCode()); + assertEquals(bsonTimestamp.hashCode(), bsonTimestampDup.hashCode()); + assertEquals(bsonObjectId.hashCode(), bsonObjectIdDup.hashCode()); + assertEquals(bsonBinary.hashCode(), bsonBinaryDup.hashCode()); + assertEquals(minKey.hashCode(), minKeyDup.hashCode()); + assertEquals(maxKey.hashCode(), maxKeyDup.hashCode()); + + // BSON types hash codes are not equal to each other + assertNotEquals(regex.hashCode(), int32.hashCode()); + assertNotEquals(regex.hashCode(), 
bsonTimestamp.hashCode()); + assertNotEquals(regex.hashCode(), bsonObjectId.hashCode()); + assertNotEquals(regex.hashCode(), bsonBinary.hashCode()); + assertNotEquals(regex.hashCode(), minKey.hashCode()); + assertNotEquals(regex.hashCode(), maxKey.hashCode()); + + assertNotEquals(int32.hashCode(), bsonTimestamp.hashCode()); + assertNotEquals(int32.hashCode(), bsonObjectId.hashCode()); + assertNotEquals(int32.hashCode(), bsonBinary.hashCode()); + assertNotEquals(int32.hashCode(), minKey.hashCode()); + assertNotEquals(int32.hashCode(), maxKey.hashCode()); + + assertNotEquals(bsonTimestamp.hashCode(), bsonObjectId.hashCode()); + assertNotEquals(bsonTimestamp.hashCode(), bsonBinary.hashCode()); + assertNotEquals(bsonTimestamp.hashCode(), minKey.hashCode()); + assertNotEquals(bsonTimestamp.hashCode(), maxKey.hashCode()); + + assertNotEquals(bsonObjectId.hashCode(), bsonBinary.hashCode()); + assertNotEquals(bsonObjectId.hashCode(), minKey.hashCode()); + assertNotEquals(bsonObjectId.hashCode(), maxKey.hashCode()); + + assertNotEquals(bsonBinary.hashCode(), minKey.hashCode()); + assertNotEquals(bsonBinary.hashCode(), maxKey.hashCode()); + + assertNotEquals(minKey.hashCode(), maxKey.hashCode()); } } diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/UserDataWriterTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/UserDataWriterTest.java index a856f316ff1..3175ed05b6a 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/UserDataWriterTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/UserDataWriterTest.java @@ -34,6 +34,7 @@ import com.google.firebase.firestore.model.Values; import com.google.firestore.v1.ArrayValue; import com.google.firestore.v1.Value; +import com.google.protobuf.ByteString; import java.util.Date; import java.util.HashMap; import java.util.List; @@ -220,6 +221,81 @@ public void testConvertsGeoPointValue() { } } + @Test + public void testConvertsBsonObjectIdValue() { + List testCases = asList(new BsonObjectId("foo"), FieldValue.bsonObjectId("bar")); + for (BsonObjectId p : testCases) { + Value value = wrap(p); + Object convertedValue = convertValue(value); + assertEquals(p, convertedValue); + } + } + + @Test + public void testConvertsBsonTimestampValue() { + List testCases = asList(new BsonTimestamp(1, 2), FieldValue.bsonTimestamp(3, 4)); + for (BsonTimestamp p : testCases) { + Value value = wrap(p); + Object convertedValue = convertValue(value); + assertEquals(p, convertedValue); + } + } + + @Test + public void testConvertsBsonBinaryValue() { + List testCases = + asList( + BsonBinaryData.fromBytes(1, new byte[] {1, 2}), + BsonBinaryData.fromByteString(1, ByteString.EMPTY), + FieldValue.bsonBinaryData(1, new byte[] {1, 2})); + for (BsonBinaryData p : testCases) { + Value value = wrap(p); + Object convertedValue = convertValue(value); + assertEquals(p, convertedValue); + } + } + + @Test + public void testConvertsRegexValue() { + List testCases = asList(new RegexValue("^foo", "i"), FieldValue.regex("^bar", "g")); + for (RegexValue p : testCases) { + Value value = wrap(p); + Object convertedValue = convertValue(value); + assertEquals(p, convertedValue); + } + } + + @Test + public void testConvertsInt32Value() { + List testCases = + asList(new Int32Value(1), new Int32Value(-1), new Int32Value(0), FieldValue.int32(123)); + for (Int32Value p : testCases) { + Value value = wrap(p); + Object convertedValue = convertValue(value); + assertEquals(p, convertedValue); + } + } + + @Test + public 
void testConvertsMinKey() { + List testCases = asList(FieldValue.minKey(), MinKey.instance()); + for (MinKey p : testCases) { + Value value = wrap(p); + Object convertedValue = convertValue(value); + assertEquals(p, convertedValue); + } + } + + @Test + public void testConvertsMaxKey() { + List testCases = asList(FieldValue.maxKey(), MaxKey.instance()); + for (MaxKey p : testCases) { + Value value = wrap(p); + Object convertedValue = convertValue(value); + assertEquals(p, convertedValue); + } + } + @Test public void testConvertsEmptyObjects() { assertEquals(wrapObject(), new ObjectValue()); diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/bundle/BundleSerializerTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/bundle/BundleSerializerTest.java index 355833a6586..6e0186adac8 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/bundle/BundleSerializerTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/bundle/BundleSerializerTest.java @@ -30,6 +30,7 @@ import com.google.firebase.firestore.model.ObjectValue; import com.google.firebase.firestore.model.ResourcePath; import com.google.firebase.firestore.model.SnapshotVersion; +import com.google.firebase.firestore.model.Values; import com.google.firebase.firestore.remote.RemoteSerializer; import com.google.firebase.firestore.testutil.TestUtil; import com.google.firestore.v1.ArrayValue; @@ -215,6 +216,109 @@ public void testDecodesReferenceValues() throws JSONException { assertDecodesValue(json, proto.build()); } + @Test + public void testDecodesBsonObjectIdValues() throws JSONException { + String json = "{ mapValue: { fields: { __oid__: { stringValue: 'foo' } } } }"; + Value.Builder proto = Value.newBuilder(); + proto.setMapValue( + MapValue.newBuilder() + .putFields( + Values.RESERVED_OBJECT_ID_KEY, Value.newBuilder().setStringValue("foo").build())); + + assertDecodesValue(json, proto.build()); + } + + @Test + public void testDecodesBsonTimestampValues() throws JSONException { + String json = + "{ mapValue: { fields: { __request_timestamp__: { mapValue: { fields: { seconds: { integerValue: 12345 }, increment: { integerValue: 67 } } } } } } }"; + Value.Builder proto = Value.newBuilder(); + proto.setMapValue( + MapValue.newBuilder() + .putFields( + Values.RESERVED_BSON_TIMESTAMP_KEY, + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + Values.RESERVED_BSON_TIMESTAMP_SECONDS_KEY, + Value.newBuilder().setIntegerValue(12345).build()) + .putFields( + Values.RESERVED_BSON_TIMESTAMP_INCREMENT_KEY, + Value.newBuilder().setIntegerValue(67).build())) + .build())); + assertDecodesValue(json, proto.build()); + } + + @Test + public void testDecodesBsonBinaryDataValues() throws JSONException { + String json = "{ mapValue: { fields: { __binary__: { bytesValue: 'AAECAw==' } } } }"; + Value.Builder proto = Value.newBuilder(); + proto.setMapValue( + MapValue.newBuilder() + .putFields( + Values.RESERVED_BSON_BINARY_KEY, + Value.newBuilder().setBytesValue(TestUtil.byteString(0, 1, 2, 3)).build())); + assertDecodesValue(json, proto.build()); + } + + @Test + public void testDecodesRegexValues() throws JSONException { + String json = + "{ mapValue: { fields: { __regex__: { mapValue: { fields: { pattern: { stringValue: '^foo' }, options: { stringValue: 'i' } } } } } } }"; + Value.Builder proto = Value.newBuilder(); + proto.setMapValue( + MapValue.newBuilder() + .putFields( + Values.RESERVED_REGEX_KEY, + Value.newBuilder() + .setMapValue( + 
MapValue.newBuilder() + .putFields( + Values.RESERVED_REGEX_PATTERN_KEY, + Value.newBuilder().setStringValue("^foo").build()) + .putFields( + Values.RESERVED_REGEX_OPTIONS_KEY, + Value.newBuilder().setStringValue("i").build())) + .build())); + assertDecodesValue(json, proto.build()); + } + + @Test + public void testDecodesInt32Values() throws JSONException { + String json = "{ mapValue: { fields: { __int__: { integerValue: 12345 } } } }"; + Value.Builder proto = Value.newBuilder(); + proto.setMapValue( + MapValue.newBuilder() + .putFields( + Values.RESERVED_INT32_KEY, Value.newBuilder().setIntegerValue(12345).build())); + assertDecodesValue(json, proto.build()); + } + + @Test + public void testDecodesMinKey() throws JSONException { + String json = "{ mapValue: { fields: { __min__: { nullValue: null } } } }"; + Value.Builder proto = Value.newBuilder(); + proto.setMapValue( + MapValue.newBuilder() + .putFields( + Values.RESERVED_MIN_KEY, + Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build())); + assertDecodesValue(json, proto.build()); + } + + @Test + public void testDecodesMaxKey() throws JSONException { + String json = "{ mapValue: { fields: { __max__: { nullValue: null } } } }"; + Value.Builder proto = Value.newBuilder(); + proto.setMapValue( + MapValue.newBuilder() + .putFields( + Values.RESERVED_MAX_KEY, + Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build())); + assertDecodesValue(json, proto.build()); + } + @Test public void testDecodesArrayValues() throws JSONException { String json = diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/core/QueryTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/core/QueryTest.java index de3de67463c..7ee54cda372 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/core/QueryTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/core/QueryTest.java @@ -34,6 +34,7 @@ import com.google.firebase.Timestamp; import com.google.firebase.firestore.Blob; +import com.google.firebase.firestore.FieldValue; import com.google.firebase.firestore.GeoPoint; import com.google.firebase.firestore.model.DocumentKey; import com.google.firebase.firestore.model.MutableDocument; @@ -839,6 +840,29 @@ public void testCanonicalIdsAreStable() { "collection|f:|ob:aasc__name__asc|ub:a:foo,[1,2,3]"); assertCanonicalId(baseQuery.limitToFirst(5), "collection|f:|ob:__name__asc|l:5"); assertCanonicalId(baseQuery.limitToLast(5), "collection|f:|ob:__name__desc|l:5"); + + // BSON types + assertCanonicalId( + baseQuery.filter(filter("a", "<=", FieldValue.bsonObjectId("foo"))), + "collection|f:a<={__oid__:foo}|ob:aasc__name__asc"); + assertCanonicalId( + baseQuery.filter(filter("a", "<=", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}))), + "collection|f:a<={__binary__:01010203}|ob:aasc__name__asc"); + assertCanonicalId( + baseQuery.filter(filter("a", "<=", FieldValue.bsonTimestamp(1, 2))), + "collection|f:a<={__request_timestamp__:{increment:2,seconds:1}}|ob:aasc__name__asc"); + assertCanonicalId( + baseQuery.filter(filter("a", "<=", FieldValue.regex("^foo", "i"))), + "collection|f:a<={__regex__:{options:i,pattern:^foo}}|ob:aasc__name__asc"); + assertCanonicalId( + baseQuery.filter(filter("a", "<=", FieldValue.int32(1))), + "collection|f:a<={__int__:1}|ob:aasc__name__asc"); + assertCanonicalId( + baseQuery.filter(filter("a", "<=", FieldValue.minKey())), + "collection|f:a<={__min__:null}|ob:aasc__name__asc"); + assertCanonicalId( + baseQuery.filter(filter("a", "<=", 
FieldValue.maxKey())), + "collection|f:a<={__max__:null}|ob:aasc__name__asc"); } private void assertCanonicalId(Query query, String expectedCanonicalId) { diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/model/ValuesTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/model/ValuesTest.java index 6a7dbe9c259..400e34481dd 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/model/ValuesTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/model/ValuesTest.java @@ -26,11 +26,19 @@ import com.google.common.testing.EqualsTester; import com.google.firebase.Timestamp; +import com.google.firebase.firestore.BsonBinaryData; +import com.google.firebase.firestore.BsonObjectId; +import com.google.firebase.firestore.BsonTimestamp; import com.google.firebase.firestore.FieldValue; import com.google.firebase.firestore.GeoPoint; +import com.google.firebase.firestore.Int32Value; +import com.google.firebase.firestore.MaxKey; +import com.google.firebase.firestore.MinKey; +import com.google.firebase.firestore.RegexValue; import com.google.firebase.firestore.testutil.ComparatorTester; import com.google.firebase.firestore.testutil.TestUtil; import com.google.firestore.v1.Value; +import com.google.protobuf.ByteString; import java.util.Arrays; import java.util.Calendar; import java.util.Collections; @@ -64,6 +72,28 @@ public void testValueEquality() { GeoPoint geoPoint2 = new GeoPoint(0, 2); Timestamp timestamp1 = new Timestamp(date1); Timestamp timestamp2 = new Timestamp(date2); + + BsonObjectId objectId1 = new BsonObjectId("507f191e810c19729de860ea"); + BsonObjectId objectId2 = new BsonObjectId("507f191e810c19729de860eb"); + + BsonBinaryData binaryData1 = BsonBinaryData.fromBytes(1, new byte[] {1, 2}); + BsonBinaryData binaryData2 = BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}); + BsonBinaryData binaryData3 = BsonBinaryData.fromBytes(2, new byte[] {1, 2}); + + BsonTimestamp bsonTimestamp1 = new BsonTimestamp(1, 2); + BsonTimestamp bsonTimestamp2 = new BsonTimestamp(1, 3); + BsonTimestamp bsonTimestamp3 = new BsonTimestamp(2, 2); + + Int32Value int32Value1 = FieldValue.int32(1); + Int32Value int32Value2 = FieldValue.int32(2); + + RegexValue regexValue1 = FieldValue.regex("^foo", "i"); + RegexValue regexValue2 = FieldValue.regex("^foo", "m"); + RegexValue regexValue3 = FieldValue.regex("^bar", "i"); + + MinKey minKey = FieldValue.minKey(); + MaxKey maxKey = FieldValue.maxKey(); + new EqualsTester() .addEqualityGroup(wrap(true), wrap(true)) .addEqualityGroup(wrap(false), wrap(false)) @@ -108,6 +138,22 @@ public void testValueEquality() { .addEqualityGroup(wrap(map("bar", 2, "foo", 1))) .addEqualityGroup(wrap(map("bar", 1))) .addEqualityGroup(wrap(map("foo", 1))) + .addEqualityGroup(wrap(new BsonObjectId("507f191e810c19729de860ea")), wrap(objectId1)) + .addEqualityGroup(wrap(new BsonObjectId("507f191e810c19729de860eb")), wrap(objectId2)) + .addEqualityGroup(wrap(BsonBinaryData.fromBytes(1, new byte[] {1, 2})), wrap(binaryData1)) + .addEqualityGroup( + wrap(BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})), wrap(binaryData2)) + .addEqualityGroup(wrap(BsonBinaryData.fromBytes(2, new byte[] {1, 2})), wrap(binaryData3)) + .addEqualityGroup(wrap(new BsonTimestamp(1, 2)), wrap(bsonTimestamp1)) + .addEqualityGroup(wrap(new BsonTimestamp(1, 3)), wrap(bsonTimestamp2)) + .addEqualityGroup(wrap(new BsonTimestamp(2, 2)), wrap(bsonTimestamp3)) + .addEqualityGroup(wrap(FieldValue.int32(1)), wrap(int32Value1)) + 
.addEqualityGroup(wrap(FieldValue.int32(2)), wrap(int32Value2)) + .addEqualityGroup(wrap(FieldValue.regex("^foo", "i")), wrap(regexValue1)) + .addEqualityGroup(wrap(FieldValue.regex("^foo", "m")), wrap(regexValue2)) + .addEqualityGroup(wrap(FieldValue.regex("^bar", "i")), wrap(regexValue3)) + .addEqualityGroup(wrap(FieldValue.minKey()), wrap(minKey)) + .addEqualityGroup(wrap(FieldValue.maxKey()), wrap(maxKey)) .testEquals(); } @@ -120,27 +166,39 @@ public void testValueOrdering() { // null first .addEqualityGroup(wrap((Object) null)) + // MinKey is after null + .addEqualityGroup(wrap(FieldValue.minKey()), wrap(MinKey.instance())) + // booleans .addEqualityGroup(wrap(false)) .addEqualityGroup(wrap(true)) - // numbers + // 64-bit and 32-bit numbers order together numerically. .addEqualityGroup(wrap(Double.NaN)) .addEqualityGroup(wrap(Double.NEGATIVE_INFINITY)) .addEqualityGroup(wrap(-Double.MAX_VALUE)) .addEqualityGroup(wrap(Long.MIN_VALUE)) + .addEqualityGroup( + wrap(new Int32Value(-2147483648)), + wrap(FieldValue.int32(-2147483648)), + wrap(Integer.MIN_VALUE)) .addEqualityGroup(wrap(-1.1)) .addEqualityGroup(wrap(-1.0)) .addEqualityGroup(wrap(-Double.MIN_NORMAL)) .addEqualityGroup(wrap(-Double.MIN_VALUE)) // Zeros all compare the same. - .addEqualityGroup(wrap(-0.0), wrap(0.0), wrap(0L)) + .addEqualityGroup( + wrap(-0.0), wrap(0.0), wrap(0L), wrap(new Int32Value(0)), wrap(FieldValue.int32(0))) .addEqualityGroup(wrap(Double.MIN_VALUE)) .addEqualityGroup(wrap(Double.MIN_NORMAL)) .addEqualityGroup(wrap(0.1)) - // Doubles and Longs compareTo() the same. - .addEqualityGroup(wrap(1.0), wrap(1L)) + // Doubles, Longs, Int32Values compareTo() the same. + .addEqualityGroup(wrap(1.0), wrap(1L), wrap(new Int32Value(1))) .addEqualityGroup(wrap(1.1)) + .addEqualityGroup( + wrap(new Int32Value(2147483647)), + wrap(FieldValue.int32(2147483647)), + wrap(Integer.MAX_VALUE)) .addEqualityGroup(wrap(Long.MAX_VALUE)) .addEqualityGroup(wrap(Double.MAX_VALUE)) .addEqualityGroup(wrap(Double.POSITIVE_INFINITY)) @@ -149,6 +207,11 @@ public void testValueOrdering() { .addEqualityGroup(wrap(date1)) .addEqualityGroup(wrap(date2)) + // bson timestamps + .addEqualityGroup(wrap(new BsonTimestamp(123, 4)), wrap(FieldValue.bsonTimestamp(123, 4))) + .addEqualityGroup(wrap(new BsonTimestamp(123, 5))) + .addEqualityGroup(wrap(new BsonTimestamp(124, 0))) + // server timestamps come after all concrete timestamps. 
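+        // (BSON timestamps are still concrete values: they order by seconds, then
+        // increment, and sort before the server timestamps in the next groups.)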
.addEqualityGroup(wrap(ServerTimestamps.valueOf(new Timestamp(date1), null))) .addEqualityGroup(wrap(ServerTimestamps.valueOf(new Timestamp(date2), null))) @@ -172,6 +235,16 @@ public void testValueOrdering() { .addEqualityGroup(wrap(blob(0, 1, 2, 4, 3))) .addEqualityGroup(wrap(blob(255))) + // bson binary data + .addEqualityGroup( + wrap(BsonBinaryData.fromBytes(1, new byte[] {})), + wrap(BsonBinaryData.fromByteString(1, ByteString.EMPTY)), + wrap(FieldValue.bsonBinaryData(1, new byte[] {}))) + .addEqualityGroup(wrap(BsonBinaryData.fromBytes(1, new byte[] {0}))) + .addEqualityGroup(wrap(BsonBinaryData.fromBytes(5, new byte[] {1, 2}))) + .addEqualityGroup(wrap(BsonBinaryData.fromBytes(5, new byte[] {1, 2, 3}))) + .addEqualityGroup(wrap(BsonBinaryData.fromBytes(7, new byte[] {1}))) + // resource names .addEqualityGroup(wrap(wrapRef(dbId("p1", "d1"), key("c1/doc1")))) .addEqualityGroup(wrap(wrapRef(dbId("p1", "d1"), key("c1/doc2")))) @@ -180,6 +253,17 @@ public void testValueOrdering() { .addEqualityGroup(wrap(wrapRef(dbId("p1", "d2"), key("c1/doc1")))) .addEqualityGroup(wrap(wrapRef(dbId("p2", "d1"), key("c1/doc1")))) + // bson object id + .addEqualityGroup( + wrap(new BsonObjectId("507f191e810c19729de860ea")), + wrap(FieldValue.bsonObjectId("507f191e810c19729de860ea"))) + .addEqualityGroup(wrap(new BsonObjectId("507f191e810c19729de860eb"))) + // latin small letter e + combining acute accent + latin small letter b + .addEqualityGroup(wrap(new BsonObjectId("e\u0301b"))) + .addEqualityGroup(wrap(new BsonObjectId("æ"))) + // latin small letter e with acute accent + latin small letter a + .addEqualityGroup(wrap(new BsonObjectId("\u00e9a"))) + // geo points .addEqualityGroup(wrap(new GeoPoint(-90, -180))) .addEqualityGroup(wrap(new GeoPoint(-90, 0))) @@ -194,6 +278,16 @@ public void testValueOrdering() { .addEqualityGroup(wrap(new GeoPoint(90, 0))) .addEqualityGroup(wrap(new GeoPoint(90, 180))) + // regex + .addEqualityGroup(wrap(new RegexValue("^foo", "i")), wrap(FieldValue.regex("^foo", "i"))) + .addEqualityGroup(wrap(new RegexValue("^foo", "m"))) + .addEqualityGroup(wrap(new RegexValue("^zoo", "i"))) + // latin small letter e + combining acute accent + latin small letter b + .addEqualityGroup(wrap(new RegexValue("e\u0301b", "i"))) + .addEqualityGroup(wrap(new RegexValue("æ", "i"))) + // latin small letter e with acute accent + latin small letter a + .addEqualityGroup(wrap(new RegexValue("\u00e9a", "i"))) + // arrays .addEqualityGroup(wrap(Arrays.asList("bar"))) .addEqualityGroup(wrap(Arrays.asList("foo", 1))) @@ -212,11 +306,15 @@ public void testValueOrdering() { .addEqualityGroup(wrap(map("foo", 1))) .addEqualityGroup(wrap(map("foo", 2))) .addEqualityGroup(wrap(map("foo", "0"))) + + // MaxKey is last + .addEqualityGroup(wrap(FieldValue.maxKey()), wrap(MaxKey.instance())) .testCompare(); } @Test public void testLowerBound() { + // TODO(mila/BSON): add new bson types new ComparatorTester() // null first .addEqualityGroup(wrap(getLowerBound(TestUtil.wrap((Object) null))), wrap((Object) null)) @@ -276,6 +374,7 @@ public void testLowerBound() { @Test public void testUpperBound() { + // TODO(mila/BSON): add new bson types new ComparatorTester() // null first .addEqualityGroup(wrap((Object) null)) @@ -346,6 +445,22 @@ public void testCanonicalIds() { assertCanonicalId( TestUtil.wrap(map("a", Arrays.asList("b", map("c", new GeoPoint(30, 60))))), "{a:[b,{c:geo(30.0,60.0)}]}"); + + assertCanonicalId( + TestUtil.wrap(FieldValue.regex("a", "b")), "{__regex__:{options:b,pattern:a}}"); + + 
assertCanonicalId(TestUtil.wrap(FieldValue.bsonObjectId("foo")), "{__oid__:foo}"); + assertCanonicalId( + TestUtil.wrap(FieldValue.bsonTimestamp(1, 2)), + "{__request_timestamp__:{increment:2,seconds:1}}"); + assertCanonicalId((TestUtil.wrap(FieldValue.int32(1))), "{__int__:1}"); + assertCanonicalId( + TestUtil.wrap(FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3})), "{__binary__:01010203}"); + assertCanonicalId( + TestUtil.wrap(FieldValue.bsonBinaryData(128, new byte[] {1, 2, 3})), + "{__binary__:80010203}"); + assertCanonicalId(TestUtil.wrap(FieldValue.minKey()), "{__min__:null}"); + assertCanonicalId(TestUtil.wrap(FieldValue.maxKey()), "{__max__:null}"); } @Test diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/remote/RemoteSerializerTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/remote/RemoteSerializerTest.java index 52eec0ac4cd..37cf187bc07 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/remote/RemoteSerializerTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/remote/RemoteSerializerTest.java @@ -329,6 +329,138 @@ public void testEncodesVectorValue() { assertRoundTrip(model, proto, Value.ValueTypeCase.MAP_VALUE); } + @Test + public void testEncodesBsonObjectIds() { + Value model = wrap(FieldValue.bsonObjectId("foo")); + + Value proto = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields("__oid__", Value.newBuilder().setStringValue("foo").build()) + .build()) + .build(); + + assertRoundTrip(model, proto, Value.ValueTypeCase.MAP_VALUE); + } + + @Test + public void testEncodesBsonTimestamps() { + Value model = wrap(FieldValue.bsonTimestamp(12345, 67)); + + Value proto = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + "__request_timestamp__", + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + "seconds", + Value.newBuilder().setIntegerValue(12345).build()) + .putFields( + "increment", Value.newBuilder().setIntegerValue(67).build()) + .build()) + .build()) + .build()) + .build(); + + assertRoundTrip(model, proto, Value.ValueTypeCase.MAP_VALUE); + } + + @Test + public void testEncodesBsonBinaryData() { + Value model = wrap(FieldValue.bsonBinaryData(127, new byte[] {1, 2, 3})); + + Value proto = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + "__binary__", + Value.newBuilder() + .setBytesValue(ByteString.copyFrom(new byte[] {127, 1, 2, 3})) + .build()) + .build()) + .build(); + + assertRoundTrip(model, proto, Value.ValueTypeCase.MAP_VALUE); + } + + @Test + public void testEncodesRegexValues() { + Value model = wrap(FieldValue.regex("^foo", "i")); + Value proto = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + "__regex__", + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + "pattern", + Value.newBuilder().setStringValue("^foo").build()) + .putFields( + "options", Value.newBuilder().setStringValue("i").build()) + .build()) + .build()) + .build()) + .build(); + + assertRoundTrip(model, proto, Value.ValueTypeCase.MAP_VALUE); + } + + @Test + public void testEncodesInt32Values() { + Value model = wrap(FieldValue.int32(12345)); + + Value proto = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields("__int__", Value.newBuilder().setIntegerValue(12345).build()) + .build()) + .build(); + + assertRoundTrip(model, proto, Value.ValueTypeCase.MAP_VALUE); + } + + @Test + public void testEncodesMinKey() { + Value model = 
wrap(FieldValue.minKey()); + + Value proto = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + "__min__", Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build()) + .build()) + .build(); + + assertRoundTrip(model, proto, Value.ValueTypeCase.MAP_VALUE); + } + + @Test + public void testEncodesMaxKey() { + Value model = wrap(FieldValue.maxKey()); + + Value proto = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + "__max__", Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build()) + .build()) + .build(); + + assertRoundTrip(model, proto, Value.ValueTypeCase.MAP_VALUE); + } + @Test public void testEncodeDeleteMutation() { Mutation mutation = deleteMutation("docs/1"); From 98e5e8a43d405b18b98155d450990e2213793678 Mon Sep 17 00:00:00 2001 From: Mila <107142260+milaGGL@users.noreply.github.com> Date: Fri, 25 Apr 2025 14:40:10 -0400 Subject: [PATCH 2/8] Implement indexing for bson types (#576) --- firebase-firestore/api.txt | 15 +- .../firebase/firestore/BsonTypesTest.java | 182 +++- .../google/firebase/firestore/POJOTest.java | 1 + .../firebase/firestore/core/Target.java | 20 +- .../index/FirestoreIndexValueWriter.java | 103 ++- .../firebase/firestore/model/Values.java | 215 ++++- .../firebase/firestore/core/TargetTest.java | 8 +- .../index/FirestoreIndexValueWriterTest.java | 278 ++++++ .../firestore/local/LocalStoreTestCase.java | 17 +- .../local/SQLiteIndexManagerTest.java | 371 ++++++++ .../firestore/local/SQLiteLocalStoreTest.java | 861 +++++++++++++++++- .../firebase/firestore/model/ValuesTest.java | 167 +++- 12 files changed, 2158 insertions(+), 80 deletions(-) diff --git a/firebase-firestore/api.txt b/firebase-firestore/api.txt index d461f24b60c..8ae38dc264b 100644 --- a/firebase-firestore/api.txt +++ b/firebase-firestore/api.txt @@ -47,8 +47,7 @@ package com.google.firebase.firestore { method public byte[] toBytes(); } - public final class BsonBinaryData implements java.lang.Comparable { - method public int compareTo(com.google.firebase.firestore.BsonBinaryData); + public final class BsonBinaryData { method public com.google.protobuf.ByteString dataAsByteString(); method public byte[] dataAsBytes(); method public static com.google.firebase.firestore.BsonBinaryData fromByteString(int, com.google.protobuf.ByteString); @@ -56,15 +55,13 @@ package com.google.firebase.firestore { method public int subtype(); } - public final class BsonObjectId implements java.lang.Comparable { + public final class BsonObjectId { ctor public BsonObjectId(String); - method public int compareTo(com.google.firebase.firestore.BsonObjectId); field public final String! 
value; } - public final class BsonTimestamp implements java.lang.Comparable { + public final class BsonTimestamp { ctor public BsonTimestamp(long, long); - method public int compareTo(com.google.firebase.firestore.BsonTimestamp); field public final long increment; field public final long seconds; } @@ -339,9 +336,8 @@ package com.google.firebase.firestore { @java.lang.annotation.Retention(java.lang.annotation.RetentionPolicy.RUNTIME) @java.lang.annotation.Target({java.lang.annotation.ElementType.TYPE}) public @interface IgnoreExtraProperties { } - public final class Int32Value implements java.lang.Comparable { + public final class Int32Value { ctor public Int32Value(int); - method public int compareTo(com.google.firebase.firestore.Int32Value); field public final int value; } @@ -541,9 +537,8 @@ package com.google.firebase.firestore { method public java.util.List toObjects(Class, com.google.firebase.firestore.DocumentSnapshot.ServerTimestampBehavior); } - public final class RegexValue implements java.lang.Comparable { + public final class RegexValue { ctor public RegexValue(String, String); - method public int compareTo(com.google.firebase.firestore.RegexValue); field public final String! options; field public final String! pattern; } diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java index 273f8cb35a4..ecd2e80eacb 100644 --- a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java @@ -90,6 +90,54 @@ public void writeAndReadBsonTypes() throws ExecutionException, InterruptedExcept assertEquals(expected, actual.getData()); } + @Test + public void writeAndReadBsonTypeOffline() throws ExecutionException, InterruptedException { + CollectionReference randomColl = testCollectionOnNightly(); + DocumentReference docRef = randomColl.document(); + + waitFor(randomColl.getFirestore().disableNetwork()); + + // Adding docs to cache, do not wait for promise to resolve. 
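+    // With the network disabled, these writes are latency-compensated: they are
+    // applied to the local cache immediately, so the docRef.get() below can be
+    // served from cache without waiting for the returned Tasks to resolve.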
+ Map expected = new HashMap<>(); + docRef.set( + map( + "bsonObjectId", FieldValue.bsonObjectId("507f191e810c19729de860ea"), + "regex", FieldValue.regex("^foo", "i"), + "bsonTimestamp", FieldValue.bsonTimestamp(1, 2), + "bsonBinary", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}), + "int32", FieldValue.int32(1), + "minKey", FieldValue.minKey(), + "maxKey", FieldValue.maxKey())); + + docRef.update( + map( + "bsonObjectId", + FieldValue.bsonObjectId("507f191e810c19729de860eb"), + "regex", + FieldValue.regex("^foo", "m"), + "bsonTimestamp", + FieldValue.bsonTimestamp(1, 3))); + + expected.put("bsonObjectId", FieldValue.bsonObjectId("507f191e810c19729de860eb")); + expected.put("regex", FieldValue.regex("^foo", "m")); + expected.put("bsonTimestamp", FieldValue.bsonTimestamp(1, 3)); + expected.put("bsonBinary", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3})); + expected.put("int32", FieldValue.int32(1)); + expected.put("minKey", FieldValue.minKey()); + expected.put("maxKey", FieldValue.maxKey()); + + DocumentSnapshot actual = waitFor(docRef.get()); + + assertTrue(actual.get("bsonObjectId") instanceof BsonObjectId); + assertTrue(actual.get("regex") instanceof RegexValue); + assertTrue(actual.get("bsonTimestamp") instanceof BsonTimestamp); + assertTrue(actual.get("bsonBinary") instanceof BsonBinaryData); + assertTrue(actual.get("int32") instanceof Int32Value); + assertTrue(actual.get("minKey") instanceof MinKey); + assertTrue(actual.get("maxKey") instanceof MaxKey); + assertEquals(expected, actual.getData()); + } + @Test public void listenToDocumentsWithBsonTypes() throws Throwable { final Semaphore semaphore = new Semaphore(0); @@ -199,8 +247,8 @@ public void listenToDocumentsWithBsonTypes() throws Throwable { } } - /** Verifies that the SDK orders Bson type fields the same way as the backend by comparing the result of Query.get() and - * Query.addSnapshotListener(), as well as the online and offline results */ + // TODO(Mila/BSON): remove the cache population after updating the + // assertSDKQueryResultsConsistentWithBackend @Test public void filterAndOrderBsonObjectIds() throws Exception { Map> docs = @@ -213,12 +261,22 @@ public void filterAndOrderBsonObjectIds() throws Exception { map("key", FieldValue.bsonObjectId("507f191e810c19729de860ec"))); CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + // Pre-populate the cache with all docs + waitFor(randomColl.get()); + Query orderedQuery = randomColl .orderBy("key", Direction.DESCENDING) .whereGreaterThan("key", FieldValue.bsonObjectId("507f191e810c19729de860ea")); assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "b")); + + orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereNotEqualTo("key", FieldValue.bsonObjectId("507f191e810c19729de860eb")); + + assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "a")); } @Test @@ -233,12 +291,22 @@ public void filterAndOrderBsonTimestamps() throws Exception { map("key", FieldValue.bsonTimestamp(2, 1))); CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + // Pre-populate the cache with all docs + waitFor(randomColl.get()); + Query orderedQuery = randomColl .orderBy("key", Direction.DESCENDING) .whereGreaterThan("key", FieldValue.bsonTimestamp(1, 1)); assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "b")); + + orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereNotEqualTo("key", FieldValue.bsonTimestamp(1, 2)); + + 
assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "a")); } @Test @@ -253,12 +321,22 @@ public void filterAndOrderBsonBinaryData() throws Exception { map("key", FieldValue.bsonBinaryData(2, new byte[] {1, 2, 2}))); CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + // Pre-populate the cache with all docs + waitFor(randomColl.get()); + Query orderedQuery = randomColl .orderBy("key", Direction.DESCENDING) .whereGreaterThan("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3})); assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "b")); + + orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereNotEqualTo("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 4})); + + assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "a")); } @Test @@ -270,12 +348,22 @@ public void filterAndOrderRegex() throws Exception { "c", map("key", FieldValue.regex("^baz", "i"))); CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + // Pre-populate the cache with all docs + waitFor(randomColl.get()); + Query orderedQuery = randomColl .orderBy("key", Direction.DESCENDING) .whereGreaterThan("key", FieldValue.regex("^bar", "i")); assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "b")); + + orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereNotEqualTo("key", FieldValue.regex("^bar", "m")); + + assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "a")); } @Test @@ -287,12 +375,20 @@ public void filterAndOrderInt32() throws Exception { "c", map("key", FieldValue.int32(2))); CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + // Pre-populate the cache with all docs + waitFor(randomColl.get()); + Query orderedQuery = randomColl .orderBy("key", Direction.DESCENDING) - .whereGreaterThanOrEqualTo("key", FieldValue.int32(1)); + .whereGreaterThan("key", FieldValue.int32(-1)); assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "b")); + + orderedQuery = + randomColl.orderBy("key", Direction.DESCENDING).whereNotEqualTo("key", FieldValue.int32(1)); + + assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "a")); } @Test @@ -301,10 +397,15 @@ public void filterAndOrderMinKey() throws Exception { map( "a", map("key", FieldValue.minKey()), "b", map("key", FieldValue.minKey()), - "c", map("key", FieldValue.maxKey())); + "c", map("key", null), + "d", map("key", 1L), + "e", map("key", FieldValue.maxKey())); CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); - Query orderedQuery = + // Pre-populate the cache with all docs + waitFor(randomColl.get()); + + Query query = randomColl .orderBy( "key", @@ -312,7 +413,23 @@ public void filterAndOrderMinKey() throws Exception { .DESCENDING) // minKeys are equal, would sort by documentId as secondary order .whereEqualTo("key", FieldValue.minKey()); - assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("b", "a")); + assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList("b", "a")); + + // TODO(Mila/BSON): uncomment this test when null value inclusion is fixed + // query = randomColl.whereNotEqualTo("key", FieldValue.minKey()); + // assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList("d", "e")); + + query = randomColl.whereGreaterThanOrEqualTo("key", FieldValue.minKey()); + 
assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList("a", "b")); + + query = randomColl.whereLessThanOrEqualTo("key", FieldValue.minKey()); + assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList("a", "b")); + + query = randomColl.whereGreaterThan("key", FieldValue.minKey()); + assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList()); + + query = randomColl.whereGreaterThan("key", FieldValue.minKey()); + assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList()); } @Test @@ -320,11 +437,16 @@ public void filterAndOrderMaxKey() throws Exception { Map> docs = map( "a", map("key", FieldValue.minKey()), - "b", map("key", FieldValue.maxKey()), - "c", map("key", FieldValue.maxKey())); + "b", map("key", 1L), + "c", map("key", FieldValue.maxKey()), + "d", map("key", FieldValue.maxKey()), + "e", map("key", null)); CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); - Query orderedQuery = + // Pre-populate the cache with all docs + waitFor(randomColl.get()); + + Query query = randomColl .orderBy( "key", @@ -332,7 +454,44 @@ public void filterAndOrderMaxKey() throws Exception { .DESCENDING) // maxKeys are equal, would sort by documentId as secondary order .whereEqualTo("key", FieldValue.maxKey()); - assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "b")); + assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList("d", "c")); + + // TODO(Mila/BSON): uncomment this test when null value inclusion is fixed + // query = randomColl.whereNotEqualTo("key", FieldValue.maxKey()); + // assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList("a", "b")); + + query = randomColl.whereGreaterThanOrEqualTo("key", FieldValue.maxKey()); + assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList("c", "d")); + + query = randomColl.whereLessThanOrEqualTo("key", FieldValue.maxKey()); + assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList("c", "d")); + + query = randomColl.whereLessThan("key", FieldValue.maxKey()); + assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList()); + + query = randomColl.whereGreaterThan("key", FieldValue.maxKey()); + assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList()); + } + + @Test + public void filterNullValueWithBsonTypes() throws Exception { + Map> docs = + map( + "a", map("key", FieldValue.minKey()), + "b", map("key", null), + "c", map("key", null), + "d", map("key", 1L), + "e", map("key", FieldValue.maxKey())); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + // Pre-populate the cache with all docs + waitFor(randomColl.get()); + + Query query = randomColl.whereEqualTo("key", null); + assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList("b", "c")); + + query = randomColl.whereNotEqualTo("key", null); + assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList("a", "d", "e")); } @Test @@ -379,6 +538,9 @@ public void orderBsonTypesTogether() throws Exception { map("key", FieldValue.maxKey())); CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + // Pre-populate the cache with all docs + waitFor(randomColl.get()); + Query orderedQuery = randomColl.orderBy("key", Direction.DESCENDING); List expectedDocs = Arrays.asList( diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/POJOTest.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/POJOTest.java index 
8b3f48c06a1..90d707b089c 100644 --- a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/POJOTest.java +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/POJOTest.java @@ -358,6 +358,7 @@ public void testDocumentIdAnnotation() { assertEquals(reference.getId(), readFromStore.getDocReferenceId()); } + // TODO(Mila/BSON): this test is flaky due to a bug in the backend. @Test public void testSetMerge() { CollectionReference collection = testCollectionOnNightly(); diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/core/Target.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/core/Target.java index d058e15659e..75879d77b3e 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/core/Target.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/core/Target.java @@ -16,8 +16,8 @@ import static com.google.firebase.firestore.core.FieldFilter.Operator.ARRAY_CONTAINS; import static com.google.firebase.firestore.core.FieldFilter.Operator.ARRAY_CONTAINS_ANY; -import static com.google.firebase.firestore.model.Values.MAX_VALUE; -import static com.google.firebase.firestore.model.Values.MIN_VALUE; +import static com.google.firebase.firestore.model.Values.INTERNAL_MAX_VALUE; +import static com.google.firebase.firestore.model.Values.INTERNAL_MIN_VALUE; import static com.google.firebase.firestore.model.Values.lowerBoundCompare; import static com.google.firebase.firestore.model.Values.upperBoundCompare; @@ -184,7 +184,7 @@ private List getFieldFiltersForPath(FieldPath path) { /** * Returns a lower bound of field values that can be used as a starting point to scan the index - * defined by {@code fieldIndex}. Returns {@link Values#MIN_VALUE} if no lower bound exists. + * defined by {@code fieldIndex}. Returns {@link Values#INTERNAL_MIN_VALUE} if no lower bound exists. */ public Bound getLowerBound(FieldIndex fieldIndex) { List values = new ArrayList<>(); @@ -206,7 +206,7 @@ public Bound getLowerBound(FieldIndex fieldIndex) { /** * Returns an upper bound of field values that can be used as an ending point when scanning the - * index defined by {@code fieldIndex}. Returns {@link Values#MAX_VALUE} if no upper bound exists. + * index defined by {@code fieldIndex}. Returns {@link Values#INTERNAL_MAX_VALUE} if no upper bound exists. */ public Bound getUpperBound(FieldIndex fieldIndex) { List values = new ArrayList<>(); @@ -235,12 +235,12 @@ public Bound getUpperBound(FieldIndex fieldIndex) { */ private Pair getAscendingBound( FieldIndex.Segment segment, @Nullable Bound bound) { - Value segmentValue = MIN_VALUE; + Value segmentValue = INTERNAL_MIN_VALUE; boolean segmentInclusive = true; // Process all filters to find a value for the current field segment for (FieldFilter fieldFilter : getFieldFiltersForPath(segment.getFieldPath())) { - Value filterValue = MIN_VALUE; + Value filterValue = INTERNAL_MIN_VALUE; boolean filterInclusive = true; switch (fieldFilter.getOperator()) { @@ -259,7 +259,7 @@ private Pair getAscendingBound( break; case NOT_EQUAL: case NOT_IN: - filterValue = Values.MIN_VALUE; + filterValue = Values.MIN_KEY_VALUE; break; default: // Remaining filters cannot be used as bound. 
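Note on the bound changes in this file: with MinKey and MaxKey now being the smallest and largest values a user can store, open-ended NOT_EQUAL / NOT_IN index scans start at Values.MIN_KEY_VALUE and end at Values.MAX_KEY_VALUE, while the renamed INTERNAL_MIN_VALUE / INTERNAL_MAX_VALUE remain index-only sentinels. A minimal, standalone sketch of the ordering these bounds rely on (illustrative only; it assumes the public Values.compare helper and the constants added in model/Values.java below):

import com.google.firebase.firestore.model.Values;
import com.google.firestore.v1.Value;

final class BoundOrderingSketch {
  static void demo() {
    Value falseValue = Value.newBuilder().setBooleanValue(false).build();
    // null sorts before MinKey, so an ascending NOT_EQUAL scan that starts at
    // MIN_KEY_VALUE still excludes null values.
    assert Values.compare(Values.NULL_VALUE, Values.MIN_KEY_VALUE) < 0;
    // MinKey sorts before every other concrete value (booleans come next).
    assert Values.compare(Values.MIN_KEY_VALUE, falseValue) < 0;
    // MaxKey sorts after all user values but before the internal, index-only sentinel.
    assert Values.compare(Values.MAX_KEY_VALUE, Values.INTERNAL_MAX_VALUE) < 0;
  }
}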
@@ -300,12 +300,12 @@ private Pair getAscendingBound( */ private Pair getDescendingBound( FieldIndex.Segment segment, @Nullable Bound bound) { - Value segmentValue = MAX_VALUE; + Value segmentValue = INTERNAL_MAX_VALUE; boolean segmentInclusive = true; // Process all filters to find a value for the current field segment for (FieldFilter fieldFilter : getFieldFiltersForPath(segment.getFieldPath())) { - Value filterValue = MAX_VALUE; + Value filterValue = INTERNAL_MAX_VALUE; boolean filterInclusive = true; switch (fieldFilter.getOperator()) { @@ -325,7 +325,7 @@ private Pair getDescendingBound( break; case NOT_EQUAL: case NOT_IN: - filterValue = Values.MAX_VALUE; + filterValue = Values.MAX_KEY_VALUE; break; default: // Remaining filters cannot be used as bound. diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/index/FirestoreIndexValueWriter.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/index/FirestoreIndexValueWriter.java index f275634957a..87a0ef0221b 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/index/FirestoreIndexValueWriter.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/index/FirestoreIndexValueWriter.java @@ -19,31 +19,41 @@ import com.google.firestore.v1.ArrayValue; import com.google.firestore.v1.MapValue; import com.google.firestore.v1.Value; +import com.google.protobuf.ByteString; import com.google.protobuf.Timestamp; import com.google.type.LatLng; +import java.util.ArrayList; +import java.util.List; import java.util.Map; /** Firestore index value writer. */ public class FirestoreIndexValueWriter { - // Note: This code is copied from the backend. Code that is not used by Firestore was removed. + // Note: This file is copied from the backend. Code that is not used by + // Firestore was removed. Code that has different behavior was modified. // The client SDK only supports references to documents from the same database. We can skip the // first five segments. public static final int DOCUMENT_NAME_OFFSET = 5; public static final int INDEX_TYPE_NULL = 5; + public static final int INDEX_TYPE_MIN_KEY = 7; public static final int INDEX_TYPE_BOOLEAN = 10; public static final int INDEX_TYPE_NAN = 13; public static final int INDEX_TYPE_NUMBER = 15; public static final int INDEX_TYPE_TIMESTAMP = 20; + public static final int INDEX_TYPE_BSON_TIMESTAMP = 22; public static final int INDEX_TYPE_STRING = 25; public static final int INDEX_TYPE_BLOB = 30; + public static final int INDEX_TYPE_BSON_BINARY = 31; public static final int INDEX_TYPE_REFERENCE = 37; + public static final int INDEX_TYPE_BSON_OBJECT_ID = 43; public static final int INDEX_TYPE_GEOPOINT = 45; + public static final int INDEX_TYPE_REGEX = 47; public static final int INDEX_TYPE_ARRAY = 50; public static final int INDEX_TYPE_VECTOR = 53; public static final int INDEX_TYPE_MAP = 55; public static final int INDEX_TYPE_REFERENCE_SEGMENT = 60; + public static final int INDEX_TYPE_MAX_KEY = 999; // A terminator that indicates that a truncatable value was not truncated. // This must be smaller than all other type labels. 
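The constants above slot the new BSON types into the existing index label order: NULL (5) < MIN_KEY (7) < BOOLEAN (10) < NAN (13) < NUMBER (15) < TIMESTAMP (20) < BSON_TIMESTAMP (22) < STRING (25) < BLOB (30) < BSON_BINARY (31) < REFERENCE (37) < BSON_OBJECT_ID (43) < GEOPOINT (45) < REGEX (47) < ARRAY (50) < VECTOR (53) < MAP (55) < MAX_KEY (999), so MinKey entries index just after null and MaxKey entries after everything else. The hunk below also packs a BSON timestamp's seconds and increment into a single 64-bit key; a small standalone sketch of that packing for seconds=1, increment=2 (illustrative only, using the same shift/mask expression as the writer):

// Worked example of the (seconds, increment) -> 64-bit packing used by
// writeIndexBsonTimestamp in the hunk below.
final class BsonTimestampPackingSketch {
  public static void main(String[] args) {
    long seconds = 1L;    // treated as an unsigned 32-bit value
    long increment = 2L;  // treated as an unsigned 32-bit value
    long packed = (seconds << 32) | (increment & 0xFFFFFFFFL);
    System.out.println(Long.toHexString(packed)); // prints 100000002 (decimal 4294967298)
    // Comparing packed values as longs preserves the expected order:
    // (1, 2) < (1, 3) < (2, 0), matching the BsonTimestamp groups in ValuesTest.
  }
}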
@@ -112,13 +122,43 @@ private void writeIndexValueAux(Value indexValue, DirectionalIndexByteEncoder en encoder.writeDouble(geoPoint.getLongitude()); break; case MAP_VALUE: - if (Values.isMaxValue(indexValue)) { + Values.MapRepresentation mapType = Values.detectMapRepresentation(indexValue); + if (mapType.equals(Values.MapRepresentation.INTERNAL_MAX)) { writeValueTypeLabel(encoder, Integer.MAX_VALUE); break; - } else if (Values.isVectorValue(indexValue)) { + } else if (mapType.equals(Values.MapRepresentation.VECTOR)) { writeIndexVector(indexValue.getMapValue(), encoder); break; + } else if (mapType.equals(Values.MapRepresentation.REGEX)) { + writeIndexRegex(indexValue.getMapValue(), encoder); + break; + } else if (mapType.equals(Values.MapRepresentation.BSON_TIMESTAMP)) { + writeIndexBsonTimestamp(indexValue.getMapValue(), encoder); + break; + } else if (mapType.equals(Values.MapRepresentation.BSON_OBJECT_ID)) { + writeIndexBsonObjectId(indexValue.getMapValue(), encoder); + break; + } else if (mapType.equals(Values.MapRepresentation.BSON_BINARY)) { + writeIndexBsonBinaryData(indexValue.getMapValue(), encoder); + break; + } else if (mapType.equals(Values.MapRepresentation.INT32)) { + writeValueTypeLabel(encoder, INDEX_TYPE_NUMBER); + // Double and Int32 sort the same + encoder.writeDouble( + indexValue + .getMapValue() + .getFieldsMap() + .get(Values.RESERVED_INT32_KEY) + .getIntegerValue()); + break; + } else if (mapType.equals(Values.MapRepresentation.MIN_KEY)) { + writeValueTypeLabel(encoder, INDEX_TYPE_MIN_KEY); + break; + } else if (mapType.equals(Values.MapRepresentation.MAX_KEY)) { + writeValueTypeLabel(encoder, INDEX_TYPE_MAX_KEY); + break; } + writeIndexMap(indexValue.getMapValue(), encoder); writeTruncationMarker(encoder); break; @@ -157,6 +197,53 @@ private void writeIndexVector(MapValue mapIndexValue, DirectionalIndexByteEncode this.writeIndexValueAux(map.get(key), encoder); } + private void writeIndexRegex(MapValue mapIndexValue, DirectionalIndexByteEncoder encoder) { + writeValueTypeLabel(encoder, INDEX_TYPE_REGEX); + + Map fields = + mapIndexValue.getFieldsMap().get(Values.RESERVED_REGEX_KEY).getMapValue().getFieldsMap(); + encoder.writeString(fields.get(Values.RESERVED_REGEX_PATTERN_KEY).getStringValue()); + encoder.writeString(fields.get(Values.RESERVED_REGEX_OPTIONS_KEY).getStringValue()); + writeTruncationMarker(encoder); + } + + private void writeIndexBsonTimestamp(MapValue mapValue, DirectionalIndexByteEncoder encoder) { + writeValueTypeLabel(encoder, INDEX_TYPE_BSON_TIMESTAMP); + + Map timestampFields = + mapValue + .getFieldsMap() + .get(Values.RESERVED_BSON_TIMESTAMP_KEY) + .getMapValue() + .getFieldsMap(); + + long unsignedSeconds = + timestampFields.get(Values.RESERVED_BSON_TIMESTAMP_SECONDS_KEY).getIntegerValue(); + long unsignedIncrement = + timestampFields.get(Values.RESERVED_BSON_TIMESTAMP_INCREMENT_KEY).getIntegerValue(); + + // BSON Timestamps are encoded as a 64-bit long with the lower 32 bits being the increment + // and the upper 32 bits being the seconds + long value = (unsignedSeconds << 32) | (unsignedIncrement & 0xFFFFFFFFL); + + encoder.writeLong(value); + } + + private void writeIndexBsonObjectId(MapValue mapValue, DirectionalIndexByteEncoder encoder) { + writeValueTypeLabel(encoder, INDEX_TYPE_BSON_OBJECT_ID); + + String oid = mapValue.getFieldsMap().get(Values.RESERVED_OBJECT_ID_KEY).getStringValue(); + encoder.writeBytes(ByteString.copyFrom(oid.getBytes())); + } + + private void writeIndexBsonBinaryData(MapValue mapValue, 
DirectionalIndexByteEncoder encoder) { + writeValueTypeLabel(encoder, INDEX_TYPE_BSON_BINARY); + + encoder.writeBytes( + mapValue.getFieldsMap().get(Values.RESERVED_BSON_BINARY_KEY).getBytesValue()); + writeTruncationMarker(encoder); + } + private void writeIndexMap(MapValue mapIndexValue, DirectionalIndexByteEncoder encoder) { writeValueTypeLabel(encoder, INDEX_TYPE_MAP); for (Map.Entry entry : mapIndexValue.getFieldsMap().entrySet()) { @@ -177,7 +264,15 @@ private void writeIndexArray(ArrayValue arrayIndexValue, DirectionalIndexByteEnc private void writeIndexEntityRef(String referenceValue, DirectionalIndexByteEncoder encoder) { writeValueTypeLabel(encoder, INDEX_TYPE_REFERENCE); - ResourcePath path = ResourcePath.fromString(referenceValue); + List segments = new ArrayList<>(); + String[] parts = referenceValue.split("/"); + for (String part : parts) { + if (!part.isEmpty()) { + segments.add(part); + } + } + ResourcePath path = ResourcePath.fromSegments(segments); + int numSegments = path.length(); for (int index = DOCUMENT_NAME_OFFSET; index < numSegments; ++index) { String segment = path.getSegment(index); diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/model/Values.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/model/Values.java index 79b715182e0..71456829ffb 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/model/Values.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/model/Values.java @@ -70,10 +70,10 @@ public class Values { public static final Value NAN_VALUE = Value.newBuilder().setDoubleValue(Double.NaN).build(); public static final Value NULL_VALUE = Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build(); - public static final Value MIN_VALUE = NULL_VALUE; + public static final Value INTERNAL_MIN_VALUE = NULL_VALUE; public static final Value MAX_VALUE_TYPE = Value.newBuilder().setStringValue(RESERVED_MAX_KEY).build(); - public static final Value MAX_VALUE = + public static final Value INTERNAL_MAX_VALUE = Value.newBuilder() .setMapValue(MapValue.newBuilder().putFields(TYPE_KEY, MAX_VALUE_TYPE)) .build(); @@ -81,6 +81,7 @@ public class Values { public static final Value VECTOR_VALUE_TYPE = Value.newBuilder().setStringValue(RESERVED_VECTOR_KEY).build(); public static final String VECTOR_MAP_VECTORS_KEY = "value"; + private static final Value MIN_VECTOR_VALUE = Value.newBuilder() .setMapValue( @@ -93,7 +94,7 @@ public class Values { /** * The order of types in Firestore. This order is based on the backend's ordering, but modified to - * support server timestamps and {@link #MAX_VALUE}. + * support server timestamps and {@link #INTERNAL_MAX_VALUE}. 
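+   * The new BSON types (MinKey, Int32, BSON timestamp, BSON binary data, BSON ObjectId,
+   * regex, and MaxKey) are slotted into this order as well.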
*/ public static final int TYPE_ORDER_NULL = 0; @@ -672,6 +673,80 @@ public static Value refValue(DatabaseId databaseId, DocumentKey key) { public static Value MIN_MAP = Value.newBuilder().setMapValue(MapValue.getDefaultInstance()).build(); + public static Value MIN_KEY_VALUE = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + RESERVED_MIN_KEY, + Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build())) + .build(); + + public static Value MAX_KEY_VALUE = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + RESERVED_MAX_KEY, + Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build())) + .build(); + + public static Value MIN_BSON_OBJECT_ID_VALUE = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields(RESERVED_OBJECT_ID_KEY, Value.newBuilder().setStringValue("").build())) + .build(); + + public static Value MIN_BSON_TIMESTAMP_VALUE = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + RESERVED_BSON_TIMESTAMP_KEY, + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + // Both seconds and increment are 32 bit unsigned integers + .putFields( + RESERVED_BSON_TIMESTAMP_SECONDS_KEY, + Value.newBuilder().setIntegerValue(0).build()) + .putFields( + RESERVED_BSON_TIMESTAMP_INCREMENT_KEY, + Value.newBuilder().setIntegerValue(0).build())) + .build())) + .build(); + + public static Value MIN_BSON_BINARY_VALUE = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + RESERVED_BSON_BINARY_KEY, + // bsonBinaryValue should have at least one byte as subtype + Value.newBuilder() + .setBytesValue(ByteString.copyFrom(new byte[] {0})) + .build())) + .build(); + + public static Value MIN_REGEX_VALUE = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + RESERVED_REGEX_KEY, + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields( + RESERVED_REGEX_PATTERN_KEY, + Value.newBuilder().setStringValue("").build()) + .putFields( + RESERVED_REGEX_OPTIONS_KEY, + Value.newBuilder().setStringValue("").build())) + .build())) + .build(); + /** Returns the lowest value for the given value type (inclusive). 
*/ public static Value getLowerBound(Value value) { switch (value.getValueTypeCase()) { @@ -695,10 +770,27 @@ public static Value getLowerBound(Value value) { case ARRAY_VALUE: return MIN_ARRAY; case MAP_VALUE: + MapRepresentation mapType = detectMapRepresentation(value); // VectorValue sorts after ArrayValue and before an empty MapValue - if (isVectorValue(value)) { + if (mapType.equals(MapRepresentation.VECTOR)) { return MIN_VECTOR_VALUE; + } else if (mapType.equals(MapRepresentation.BSON_OBJECT_ID)) { + return MIN_BSON_OBJECT_ID_VALUE; + } else if (mapType.equals(MapRepresentation.BSON_TIMESTAMP)) { + return MIN_BSON_TIMESTAMP_VALUE; + } else if (mapType.equals(MapRepresentation.BSON_BINARY)) { + return MIN_BSON_BINARY_VALUE; + } else if (mapType.equals(MapRepresentation.REGEX)) { + return MIN_REGEX_VALUE; + } else if (mapType.equals(MapRepresentation.INT32)) { + // int32Value is treated the same as integerValue and doubleValue + return MIN_NUMBER; + } else if (mapType.equals(MapRepresentation.MIN_KEY)) { + return MIN_KEY_VALUE; + } else if (mapType.equals(MapRepresentation.MAX_KEY)) { + return MAX_KEY_VALUE; } + return MIN_MAP; default: throw new IllegalArgumentException("Unknown value type: " + value.getValueTypeCase()); @@ -709,43 +801,116 @@ public static Value getLowerBound(Value value) { public static Value getUpperBound(Value value) { switch (value.getValueTypeCase()) { case NULL_VALUE: - return MIN_BOOLEAN; + return MIN_KEY_VALUE; case BOOLEAN_VALUE: return MIN_NUMBER; case INTEGER_VALUE: case DOUBLE_VALUE: return MIN_TIMESTAMP; case TIMESTAMP_VALUE: - return MIN_STRING; + return MIN_BSON_TIMESTAMP_VALUE; case STRING_VALUE: return MIN_BYTES; case BYTES_VALUE: - return MIN_REFERENCE; + return MIN_BSON_BINARY_VALUE; case REFERENCE_VALUE: - return MIN_GEO_POINT; + return MIN_BSON_OBJECT_ID_VALUE; case GEO_POINT_VALUE: - return MIN_ARRAY; + return MIN_REGEX_VALUE; case ARRAY_VALUE: return MIN_VECTOR_VALUE; case MAP_VALUE: - // VectorValue sorts after ArrayValue and before an empty MapValue - if (isVectorValue(value)) { + MapRepresentation mapType = detectMapRepresentation(value); + if (mapType.equals(MapRepresentation.VECTOR)) { return MIN_MAP; } - return MAX_VALUE; + if (mapType.equals(MapRepresentation.BSON_OBJECT_ID)) { + return MIN_GEO_POINT; + } + if (mapType.equals(MapRepresentation.BSON_TIMESTAMP)) { + return MIN_STRING; + } + if (mapType.equals(MapRepresentation.BSON_BINARY)) { + return MIN_REFERENCE; + } + if (mapType.equals(MapRepresentation.REGEX)) { + return MIN_ARRAY; + } + if (mapType.equals(MapRepresentation.INT32)) { + // int32Value is treated the same as integerValue and doubleValue + return MIN_TIMESTAMP; + } + if (mapType.equals(MapRepresentation.MIN_KEY)) { + return MIN_BOOLEAN; + } + if (mapType.equals(MapRepresentation.MAX_KEY)) { + return INTERNAL_MAX_VALUE; + } + + return MAX_KEY_VALUE; default: throw new IllegalArgumentException("Unknown value type: " + value.getValueTypeCase()); } } - /** Returns true if the Value represents the canonical {@link #MAX_VALUE} . 
*/ - public static boolean isMaxValue(Value value) { - return MAX_VALUE_TYPE.equals(value.getMapValue().getFieldsMap().get(TYPE_KEY)); + static boolean isMinKey(Map fields) { + return fields.size() == 1 + && fields.containsKey(RESERVED_MIN_KEY) + && fields.get(RESERVED_MIN_KEY).hasNullValue(); + } + + static boolean isMaxKey(Map fields) { + return fields.size() == 1 + && fields.containsKey(RESERVED_MAX_KEY) + && fields.get(RESERVED_MAX_KEY).hasNullValue(); + } + + static boolean isInt32Value(Map fields) { + return fields.size() == 1 + && fields.containsKey(RESERVED_INT32_KEY) + && fields.get(RESERVED_INT32_KEY).hasIntegerValue(); + } + + static boolean isBsonObjectId(Map fields) { + return fields.size() == 1 + && fields.containsKey(RESERVED_OBJECT_ID_KEY) + && fields.get(RESERVED_OBJECT_ID_KEY).hasStringValue(); + } + + static boolean isBsonBinaryData(Map fields) { + return fields.size() == 1 + && fields.containsKey(RESERVED_BSON_BINARY_KEY) + && fields.get(RESERVED_BSON_BINARY_KEY).hasBytesValue(); } - /** Returns true if the Value represents a VectorValue . */ - public static boolean isVectorValue(Value value) { - return VECTOR_VALUE_TYPE.equals(value.getMapValue().getFieldsMap().get(TYPE_KEY)); + static boolean isRegexValue(Map fields) { + if (fields.size() == 1 + && fields.containsKey(RESERVED_REGEX_KEY) + && fields.get(RESERVED_REGEX_KEY).hasMapValue()) { + MapValue innerMapValue = fields.get(RESERVED_REGEX_KEY).getMapValue(); + Map values = innerMapValue.getFieldsMap(); + return innerMapValue.getFieldsCount() == 2 + && values.containsKey(RESERVED_REGEX_PATTERN_KEY) + && values.containsKey(RESERVED_REGEX_OPTIONS_KEY) + && values.get(RESERVED_REGEX_PATTERN_KEY).hasStringValue() + && values.get(RESERVED_REGEX_OPTIONS_KEY).hasStringValue(); + } + return false; + } + + static boolean isBsonTimestamp(Map fields) { + if (fields.size() == 1 + && fields.containsKey(RESERVED_BSON_TIMESTAMP_KEY) + && fields.get(RESERVED_BSON_TIMESTAMP_KEY).hasMapValue()) { + MapValue innerMapValue = fields.get(RESERVED_BSON_TIMESTAMP_KEY).getMapValue(); + Map values = innerMapValue.getFieldsMap(); + return innerMapValue.getFieldsCount() == 2 + && values.containsKey(RESERVED_BSON_TIMESTAMP_SECONDS_KEY) + && values.containsKey(RESERVED_BSON_TIMESTAMP_INCREMENT_KEY) + && values.get(RESERVED_BSON_TIMESTAMP_SECONDS_KEY).hasIntegerValue() + && values.get(RESERVED_BSON_TIMESTAMP_INCREMENT_KEY).hasIntegerValue(); + } + return false; } public enum MapRepresentation { @@ -793,25 +958,25 @@ public static MapRepresentation detectMapRepresentation(Value value) { } // Check for BSON-related mappings - if (fields.containsKey(RESERVED_REGEX_KEY)) { + if (isRegexValue(fields)) { return MapRepresentation.REGEX; } - if (fields.containsKey(RESERVED_OBJECT_ID_KEY)) { + if (isBsonObjectId(fields)) { return MapRepresentation.BSON_OBJECT_ID; } - if (fields.containsKey(RESERVED_INT32_KEY)) { + if (isInt32Value(fields)) { return MapRepresentation.INT32; } - if (fields.containsKey(RESERVED_BSON_TIMESTAMP_KEY)) { + if (isBsonTimestamp(fields)) { return MapRepresentation.BSON_TIMESTAMP; } - if (fields.containsKey(RESERVED_BSON_BINARY_KEY)) { + if (isBsonBinaryData(fields)) { return MapRepresentation.BSON_BINARY; } - if (fields.containsKey(RESERVED_MIN_KEY)) { + if (isMinKey(fields)) { return MapRepresentation.MIN_KEY; } - if (fields.containsKey(RESERVED_MAX_KEY)) { + if (isMaxKey(fields)) { return MapRepresentation.MAX_KEY; } diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/core/TargetTest.java 
b/firebase-firestore/src/test/java/com/google/firebase/firestore/core/TargetTest.java index bad5ee427fa..f5d89ef55d8 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/core/TargetTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/core/TargetTest.java @@ -151,12 +151,12 @@ public void orderByQueryBound() { Bound lowerBound = target.getLowerBound(index); assertEquals(1, lowerBound.getPosition().size()); - assertTrue(Values.equals(lowerBound.getPosition().get(0), Values.MIN_VALUE)); + assertTrue(Values.equals(lowerBound.getPosition().get(0), Values.INTERNAL_MIN_VALUE)); assertTrue(lowerBound.isInclusive()); Bound upperBound = target.getUpperBound(index); assertEquals(1, upperBound.getPosition().size()); - assertTrue(Values.equals(upperBound.getPosition().get(0), Values.MAX_VALUE)); + assertTrue(Values.equals(upperBound.getPosition().get(0), Values.INTERNAL_MAX_VALUE)); assertTrue(upperBound.isInclusive()); } @@ -183,7 +183,7 @@ public void startAtQueryBound() { Bound upperBound = target.getUpperBound(index); assertEquals(1, upperBound.getPosition().size()); - assertTrue(Values.equals(upperBound.getPosition().get(0), Values.MAX_VALUE)); + assertTrue(Values.equals(upperBound.getPosition().get(0), Values.INTERNAL_MAX_VALUE)); assertTrue(upperBound.isInclusive()); } @@ -259,7 +259,7 @@ public void endAtQueryBound() { Bound lowerBound = target.getLowerBound(index); assertEquals(1, lowerBound.getPosition().size()); - assertTrue(Values.equals(lowerBound.getPosition().get(0), Values.MIN_VALUE)); + assertTrue(Values.equals(lowerBound.getPosition().get(0), Values.INTERNAL_MIN_VALUE)); assertTrue(lowerBound.isInclusive()); Bound upperBound = target.getUpperBound(index); diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/index/FirestoreIndexValueWriterTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/index/FirestoreIndexValueWriterTest.java index 6acb576666a..a8cc48daaa2 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/index/FirestoreIndexValueWriterTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/index/FirestoreIndexValueWriterTest.java @@ -20,6 +20,7 @@ import com.google.firebase.firestore.model.DatabaseId; import com.google.firebase.firestore.model.FieldIndex; import com.google.firestore.v1.Value; +import com.google.protobuf.ByteString; import java.util.concurrent.ExecutionException; import org.junit.Assert; import org.junit.Test; @@ -100,4 +101,281 @@ public void writeIndexValueSupportsEmptyVector() { // Assert actual and expected encodings are equal Assert.assertArrayEquals(actualBytes, expectedBytes); } + + @Test + public void writeIndexValueSupportsBsonObjectId() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(FieldValue.bsonObjectId("507f191e810c19729de860ea")); + + // Encode an actual ObjectIdValue + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_BSON_OBJECT_ID); // ObjectId type + 
expectedDirectionalEncoder.writeBytes( + ByteString.copyFrom("507f191e810c19729de860ea".getBytes())); // ObjectId value + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsBsonBinaryData() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3})); + + // Encode an actual BSONBinaryDataValue + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_BSON_BINARY); // BSONBinaryData type + expectedDirectionalEncoder.writeBytes( + ByteString.copyFrom(new byte[] {1, 1, 2, 3})); // BSONBinaryData value + expectedDirectionalEncoder.writeLong(FirestoreIndexValueWriter.NOT_TRUNCATED); + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsBsonBinaryWithEmptyData() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(FieldValue.bsonBinaryData(1, new byte[] {})); + + // Encode an actual BSONBinaryDataValue + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_BSON_BINARY); // BSONBinaryData type + expectedDirectionalEncoder.writeBytes( + ByteString.copyFrom(new byte[] {1})); // BSONBinaryData value + expectedDirectionalEncoder.writeLong(FirestoreIndexValueWriter.NOT_TRUNCATED); + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsBsonTimestamp() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(FieldValue.bsonTimestamp(1, 2)); + + // Encode an actual BSONTimestampValue + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_BSON_TIMESTAMP); // BSONTimestamp type + 
expectedDirectionalEncoder.writeLong(1L << 32 | 2 & 0xFFFFFFFFL); // BSONTimestamp value + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsLargestBsonTimestamp() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(FieldValue.bsonTimestamp(4294967295L, 4294967295L)); + + // Encode an actual BSONTimestampValue + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_BSON_TIMESTAMP); // BSONTimestamp type + expectedDirectionalEncoder.writeLong( + 4294967295L << 32 | 4294967295L & 0xFFFFFFFFL); // BSONTimestamp value + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsSmallestBsonTimestamp() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(FieldValue.bsonTimestamp(0, 0)); + + // Encode an actual BSONTimestampValue + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_BSON_TIMESTAMP); // BSONTimestamp type + expectedDirectionalEncoder.writeLong(0L << 32 | 0 & 0xFFFFFFFFL); // BSONTimestamp value + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsRegex() throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(FieldValue.regex("^foo", "i")); + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong(FirestoreIndexValueWriter.INDEX_TYPE_REGEX); // Regex type + expectedDirectionalEncoder.writeString("^foo"); // Regex pattern + expectedDirectionalEncoder.writeString("i"); // Regex options + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.NOT_TRUNCATED); // writeTruncationMarker + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = 
expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsInt32() throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(FieldValue.int32(1)); + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_NUMBER); // Number type + expectedDirectionalEncoder.writeDouble(1); // Number value + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsLargestInt32() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(FieldValue.int32(2147483647)); + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_NUMBER); // Number type + expectedDirectionalEncoder.writeDouble(2147483647); // Number value + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsSmallestInt32() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(FieldValue.int32(-2147483648)); + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_NUMBER); // Number type + expectedDirectionalEncoder.writeDouble(-2147483648); // Number value + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsMinKey() throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(FieldValue.minKey()); + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder 
expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_MIN_KEY); // MinKey type + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsMaxKey() throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(FieldValue.maxKey()); + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_MAX_KEY); // MaxKey type + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + Assert.assertArrayEquals(actualBytes, expectedBytes); + } } diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/local/LocalStoreTestCase.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/local/LocalStoreTestCase.java index 21823b1af42..499a60a734a 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/local/LocalStoreTestCase.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/local/LocalStoreTestCase.java @@ -63,6 +63,7 @@ import com.google.firebase.firestore.core.Target; import com.google.firebase.firestore.model.Document; import com.google.firebase.firestore.model.DocumentKey; +import com.google.firebase.firestore.model.DocumentSet; import com.google.firebase.firestore.model.FieldIndex; import com.google.firebase.firestore.model.MutableDocument; import com.google.firebase.firestore.model.ResourcePath; @@ -111,7 +112,7 @@ public abstract class LocalStoreTestCase { private List batches; private @Nullable ImmutableSortedMap lastChanges; - private @Nullable QueryResult lastQueryResult; + private @Nullable DocumentSet lastQueryResult; private int lastTargetId; abstract Persistence getPersistence(); @@ -214,7 +215,11 @@ protected int allocateQuery(Query query) { protected void executeQuery(Query query) { resetPersistenceStats(); - lastQueryResult = localStore.executeQuery(query, /* usePreviousResults= */ true); + QueryResult queryResult = localStore.executeQuery(query, /* usePreviousResults= */ true); + lastQueryResult = DocumentSet.emptySet(query.comparator()); + for (Entry entry : queryResult.getDocuments()) { + lastQueryResult = lastQueryResult.add(entry.getValue()); + } } protected void setIndexAutoCreationEnabled(boolean isEnabled) { @@ -310,8 +315,12 @@ private void assertNotContains(String keyPathString) { protected void assertQueryReturned(String... 
keys) { assertNotNull(lastQueryResult); - ImmutableSortedMap documents = lastQueryResult.getDocuments(); - assertThat(keys(documents)).containsExactly(Arrays.stream(keys).map(TestUtil::key).toArray()); + assertEquals(lastQueryResult.size(), keys.length); + List expectedKeys = + Arrays.stream(keys).map(TestUtil::key).collect(Collectors.toList()); + List actualKeys = + lastQueryResult.toList().stream().map(Document::getKey).collect(Collectors.toList()); + assertEquals(expectedKeys, actualKeys); } private void assertQueryDocumentMapping(int targetId, DocumentKey... keys) { diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteIndexManagerTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteIndexManagerTest.java index 57b0fc36ae4..896c6edf6ff 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteIndexManagerTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteIndexManagerTest.java @@ -18,6 +18,7 @@ import static com.google.firebase.firestore.model.FieldIndex.IndexState; import static com.google.firebase.firestore.model.FieldIndex.Segment.Kind; import static com.google.firebase.firestore.testutil.TestUtil.andFilters; +import static com.google.firebase.firestore.testutil.TestUtil.blob; import static com.google.firebase.firestore.testutil.TestUtil.bound; import static com.google.firebase.firestore.testutil.TestUtil.deletedDoc; import static com.google.firebase.firestore.testutil.TestUtil.doc; @@ -30,12 +31,16 @@ import static com.google.firebase.firestore.testutil.TestUtil.orderBy; import static com.google.firebase.firestore.testutil.TestUtil.path; import static com.google.firebase.firestore.testutil.TestUtil.query; +import static com.google.firebase.firestore.testutil.TestUtil.ref; import static com.google.firebase.firestore.testutil.TestUtil.version; import static com.google.firebase.firestore.testutil.TestUtil.wrap; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; +import com.google.firebase.Timestamp; +import com.google.firebase.firestore.FieldValue; +import com.google.firebase.firestore.GeoPoint; import com.google.firebase.firestore.auth.User; import com.google.firebase.firestore.core.Filter; import com.google.firebase.firestore.core.Query; @@ -1233,6 +1238,372 @@ public void TestCreateTargetIndexesUpgradesPartialIndexToFullIndex() { validateIndexType(subQuery2, IndexManager.IndexType.NONE); } + @Test + public void testIndexesBsonObjectId() { + indexManager.addFieldIndex( + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); + + addDoc("coll/doc1", map("key", FieldValue.bsonObjectId("507f191e810c19729de860ea"))); + addDoc("coll/doc2", map("key", FieldValue.bsonObjectId("507f191e810c19729de860eb"))); + addDoc("coll/doc3", map("key", FieldValue.bsonObjectId("507f191e810c19729de860ec"))); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + verifyResults(query, "coll/doc1", "coll/doc2", "coll/doc3"); + + query = + query("coll") + .filter(filter("key", "==", FieldValue.bsonObjectId("507f191e810c19729de860ea"))); + verifyResults(query, "coll/doc1"); + + query = + query("coll") + .filter(filter("key", "!=", FieldValue.bsonObjectId("507f191e810c19729de860ea"))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = + query("coll") + .filter(filter("key", ">=", FieldValue.bsonObjectId("507f191e810c19729de860eb"))); + 
verifyResults(query, "coll/doc2", "coll/doc3"); + + query = + query("coll") + .filter(filter("key", "<=", FieldValue.bsonObjectId("507f191e810c19729de860eb"))); + verifyResults(query, "coll/doc1", "coll/doc2"); + + query = + query("coll") + .filter(filter("key", ">", FieldValue.bsonObjectId("507f191e810c19729de860eb"))); + verifyResults(query, "coll/doc3"); + + query = + query("coll") + .filter(filter("key", "<", FieldValue.bsonObjectId("507f191e810c19729de860eb"))); + verifyResults(query, "coll/doc1"); + + query = + query("coll") + .filter(filter("key", ">", FieldValue.bsonObjectId("507f191e810c19729de860ec"))); + verifyResults(query); + + query = + query("coll") + .filter(filter("key", "<", FieldValue.bsonObjectId("507f191e810c19729de860ea"))); + verifyResults(query); + } + + @Test + public void testIndexesBsonBinary() { + indexManager.addFieldIndex( + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); + + addDoc("coll/doc1", map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}))); + addDoc("coll/doc2", map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 4}))); + addDoc("coll/doc3", map("key", FieldValue.bsonBinaryData(1, new byte[] {2, 1, 2}))); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + verifyResults(query, "coll/doc1", "coll/doc2", "coll/doc3"); + + query = + query("coll") + .filter(filter("key", "==", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}))); + verifyResults(query, "coll/doc1"); + + query = + query("coll") + .filter(filter("key", "!=", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = + query("coll") + .filter(filter("key", ">=", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 4}))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = + query("coll") + .filter(filter("key", "<=", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 4}))); + verifyResults(query, "coll/doc1", "coll/doc2"); + + query = + query("coll") + .filter(filter("key", ">", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 4}))); + verifyResults(query, "coll/doc3"); + + query = + query("coll") + .filter(filter("key", "<", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 4}))); + verifyResults(query, "coll/doc1"); + + query = + query("coll") + .filter(filter("key", ">", FieldValue.bsonBinaryData(1, new byte[] {2, 1, 2}))); + verifyResults(query); + + query = + query("coll") + .filter(filter("key", "<", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}))); + verifyResults(query); + } + + @Test + public void testIndexesBsonTimestamp() { + indexManager.addFieldIndex( + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); + + addDoc("coll/doc1", map("key", FieldValue.bsonTimestamp(1, 1))); + addDoc("coll/doc2", map("key", FieldValue.bsonTimestamp(1, 2))); + addDoc("coll/doc3", map("key", FieldValue.bsonTimestamp(2, 1))); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + verifyResults(query, "coll/doc1", "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "==", FieldValue.bsonTimestamp(1, 1))); + verifyResults(query, "coll/doc1"); + + query = query("coll").filter(filter("key", "!=", FieldValue.bsonTimestamp(1, 1))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", ">=", FieldValue.bsonTimestamp(1, 2))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "<=", FieldValue.bsonTimestamp(1, 2))); + 
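(Editor's note, illustrative sketch only; not part of this patch.) The range filters in this test rely on BSON timestamps ordering by seconds first and then by increment, so (1, 1) < (1, 2) < (2, 1). A minimal comparator sketch with invented names:

import java.util.Comparator;

final class BsonTimestampOrderSketch {
  static final class Ts {
    final long seconds;
    final long increment;

    Ts(long seconds, long increment) {
      this.seconds = seconds;
      this.increment = increment;
    }
  }

  // Seconds are compared first; the increment breaks ties.
  static final Comparator<Ts> ORDER =
      Comparator.<Ts>comparingLong(t -> t.seconds).thenComparingLong(t -> t.increment);

  public static void main(String[] args) {
    System.out.println(ORDER.compare(new Ts(1, 1), new Ts(1, 2)) < 0); // true
    System.out.println(ORDER.compare(new Ts(1, 2), new Ts(2, 1)) < 0); // true
  }
}
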
verifyResults(query, "coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", ">", FieldValue.bsonTimestamp(1, 2))); + verifyResults(query, "coll/doc3"); + + query = query("coll").filter(filter("key", "<", FieldValue.bsonTimestamp(1, 2))); + verifyResults(query, "coll/doc1"); + + query = query("coll").filter(filter("key", ">", FieldValue.bsonTimestamp(2, 1))); + verifyResults(query); + + query = query("coll").filter(filter("key", "<", FieldValue.bsonTimestamp(1, 1))); + verifyResults(query); + } + + @Test + public void testIndexesRegex() { + indexManager.addFieldIndex( + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); + + addDoc("coll/doc1", map("key", FieldValue.regex("a", "i"))); + addDoc("coll/doc2", map("key", FieldValue.regex("a", "m"))); + addDoc("coll/doc3", map("key", FieldValue.regex("b", "i"))); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + verifyResults(query, "coll/doc1", "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "==", FieldValue.regex("a", "i"))); + verifyResults(query, "coll/doc1"); + + query = query("coll").filter(filter("key", "!=", FieldValue.regex("a", "i"))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", ">=", FieldValue.regex("a", "m"))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "<=", FieldValue.regex("a", "m"))); + verifyResults(query, "coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", ">", FieldValue.regex("a", "m"))); + verifyResults(query, "coll/doc3"); + + query = query("coll").filter(filter("key", "<", FieldValue.regex("a", "m"))); + verifyResults(query, "coll/doc1"); + + query = query("coll").filter(filter("key", ">", FieldValue.regex("b", "i"))); + verifyResults(query); + + query = query("coll").filter(filter("key", "<", FieldValue.regex("a", "i"))); + verifyResults(query); + } + + @Test + public void testIndexesInt32() { + indexManager.addFieldIndex( + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); + + addDoc("coll/doc1", map("key", FieldValue.int32(1))); + addDoc("coll/doc2", map("key", FieldValue.int32(2))); + addDoc("coll/doc3", map("key", FieldValue.int32(3))); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + verifyResults(query, "coll/doc1", "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "==", FieldValue.int32(1))); + verifyResults(query, "coll/doc1"); + + query = query("coll").filter(filter("key", "!=", FieldValue.int32(1))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", ">=", FieldValue.int32(2))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "<=", FieldValue.int32(2))); + verifyResults(query, "coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", ">", FieldValue.int32(2))); + verifyResults(query, "coll/doc3"); + + query = query("coll").filter(filter("key", "<", FieldValue.int32(2))); + verifyResults(query, "coll/doc1"); + + query = query("coll").filter(filter("key", ">", FieldValue.int32(3))); + verifyResults(query); + + query = query("coll").filter(filter("key", "<", FieldValue.int32(1))); + verifyResults(query); + } + + @Test + public void testIndexesMinKey() { + indexManager.addFieldIndex( + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); + addDoc("coll/doc1", 
map("key", FieldValue.minKey())); + addDoc("coll/doc2", map("key", FieldValue.minKey())); + addDoc("coll/doc3", map("key", null)); + addDoc("coll/doc4", map("key", 1)); + addDoc("coll/doc5", map("key", FieldValue.maxKey())); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + verifyResults(query, "coll/doc3", "coll/doc1", "coll/doc2", "coll/doc4", "coll/doc5"); + + query = query("coll").filter(filter("key", "==", FieldValue.minKey())); + verifyResults(query, "coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", "!=", FieldValue.minKey())); + verifyResults(query, "coll/doc4", "coll/doc5"); + + query = query("coll").filter(filter("key", ">=", FieldValue.minKey())); + verifyResults(query, "coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", "<=", FieldValue.minKey())); + verifyResults(query, "coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", ">", FieldValue.minKey())); + verifyResults(query); + + query = query("coll").filter(filter("key", "<", FieldValue.minKey())); + verifyResults(query); + } + + @Test + public void testIndexesMaxKey() { + indexManager.addFieldIndex( + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); + addDoc("coll/doc1", map("key", FieldValue.minKey())); + addDoc("coll/doc2", map("key", 1)); + addDoc("coll/doc3", map("key", FieldValue.maxKey())); + addDoc("coll/doc4", map("key", FieldValue.maxKey())); + addDoc("coll/doc5", map("key", null)); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + verifyResults(query, "coll/doc5", "coll/doc1", "coll/doc2", "coll/doc3", "coll/doc4"); + + query = query("coll").filter(filter("key", "==", FieldValue.maxKey())); + verifyResults(query, "coll/doc3", "coll/doc4"); + + query = query("coll").filter(filter("key", "!=", FieldValue.maxKey())); + verifyResults(query, "coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", ">=", FieldValue.maxKey())); + verifyResults(query, "coll/doc3", "coll/doc4"); + + query = query("coll").filter(filter("key", "<=", FieldValue.maxKey())); + verifyResults(query, "coll/doc3", "coll/doc4"); + + query = query("coll").filter(filter("key", ">", FieldValue.maxKey())); + verifyResults(query); + + query = query("coll").filter(filter("key", "<", FieldValue.maxKey())); + verifyResults(query); + } + + @Test + public void testIndexFieldsOfBsonTypesTogether() { + indexManager.addFieldIndex(fieldIndex("coll", "key", Kind.DESCENDING)); + + addDoc("coll/doc1", map("key", FieldValue.minKey())); + addDoc("coll/doc2", map("key", FieldValue.int32(2))); + addDoc("coll/doc3", map("key", FieldValue.int32(1))); + addDoc("coll/doc4", map("key", FieldValue.bsonTimestamp(1, 2))); + addDoc("coll/doc5", map("key", FieldValue.bsonTimestamp(1, 1))); + addDoc("coll/doc6", map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 4}))); + addDoc("coll/doc7", map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}))); + addDoc("coll/doc8", map("key", FieldValue.bsonObjectId("507f191e810c19729de860eb"))); + addDoc("coll/doc9", map("key", FieldValue.bsonObjectId("507f191e810c19729de860ea"))); + addDoc("coll/doc10", map("key", FieldValue.regex("a", "m"))); + addDoc("coll/doc11", map("key", FieldValue.regex("a", "i"))); + addDoc("coll/doc12", map("key", FieldValue.maxKey())); + + Query query = query("coll").orderBy(orderBy("key", "desc")); + verifyResults( + query, + "coll/doc12", // maxKey + "coll/doc10", // regex m + "coll/doc11", // regex i + "coll/doc8", // objectId eb + "coll/doc9", // 
objectId ea + "coll/doc6", // binary [1,2,4] + "coll/doc7", // binary [1,2,3] + "coll/doc4", // timestamp 1,2 + "coll/doc5", // timestamp 1,1 + "coll/doc2", // int32 2 + "coll/doc3", // int32 1 + "coll/doc1" // minKey + ); + } + + @Test + public void testIndexFieldsOfAllTypesTogether() { + indexManager.addFieldIndex(fieldIndex("coll", "key", Kind.DESCENDING)); + + addDoc("coll/a", map("key", null)); + addDoc("coll/b", map("key", FieldValue.minKey())); + addDoc("coll/c", map("key", true)); + addDoc("coll/d", map("key", Double.NaN)); + addDoc("coll/e", map("key", FieldValue.int32(1))); + addDoc("coll/f", map("key", 2.0)); + addDoc("coll/g", map("key", 3L)); + addDoc("coll/h", map("key", new Timestamp(100, 123456000))); + addDoc("coll/i", map("key", FieldValue.bsonTimestamp(1, 2))); + addDoc("coll/j", map("key", "string")); + addDoc("coll/k", map("key", blob(1, 2, 3))); + addDoc("coll/l", map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}))); + addDoc("coll/m", map("key", ref("foo/bar"))); + addDoc("coll/n", map("key", FieldValue.bsonObjectId("507f191e810c19729de860ea"))); + addDoc("coll/o", map("key", new GeoPoint(0, 1))); + addDoc("coll/p", map("key", FieldValue.regex("^foo", "i"))); + addDoc("coll/q", map("key", Arrays.asList(1, 2))); + // Note: Vector type not available in Java SDK, skipping 'r' + addDoc("coll/s", map("key", map("a", 1))); + addDoc("coll/t", map("key", FieldValue.maxKey())); + + Query query = query("coll").orderBy(orderBy("key", "desc")); + verifyResults( + query, + "coll/t", // maxKey + "coll/s", // map + "coll/q", // array + "coll/p", // regex + "coll/o", // geopoint + "coll/n", // objectId + "coll/m", // reference + "coll/l", // bsonBinary + "coll/k", // bytes + "coll/j", // string + "coll/i", // bsonTimestamp + "coll/h", // timestamp + "coll/g", // long + "coll/f", // double + "coll/e", // int32 + "coll/d", // NaN + "coll/c", // boolean + "coll/b", // minKey + "coll/a" // null + ); + } + private void validateIndexType(Query query, IndexManager.IndexType expected) { IndexManager.IndexType indexType = indexManager.getIndexType(query.toTarget()); assertEquals(indexType, expected); diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteLocalStoreTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteLocalStoreTest.java index 63569e6dc85..0c78edaca84 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteLocalStoreTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteLocalStoreTest.java @@ -16,6 +16,7 @@ import static com.google.common.truth.Truth.assertThat; import static com.google.firebase.firestore.testutil.TestUtil.addedRemoteEvent; +import static com.google.firebase.firestore.testutil.TestUtil.blob; import static com.google.firebase.firestore.testutil.TestUtil.deleteMutation; import static com.google.firebase.firestore.testutil.TestUtil.deletedDoc; import static com.google.firebase.firestore.testutil.TestUtil.doc; @@ -27,6 +28,7 @@ import static com.google.firebase.firestore.testutil.TestUtil.orFilters; import static com.google.firebase.firestore.testutil.TestUtil.orderBy; import static com.google.firebase.firestore.testutil.TestUtil.query; +import static com.google.firebase.firestore.testutil.TestUtil.ref; import static com.google.firebase.firestore.testutil.TestUtil.setMutation; import static com.google.firebase.firestore.testutil.TestUtil.updateRemoteEvent; import static 
com.google.firebase.firestore.testutil.TestUtil.version; @@ -35,6 +37,7 @@ import com.google.firebase.Timestamp; import com.google.firebase.firestore.FieldValue; +import com.google.firebase.firestore.GeoPoint; import com.google.firebase.firestore.core.Query; import com.google.firebase.firestore.model.DocumentKey; import com.google.firebase.firestore.model.FieldIndex; @@ -367,6 +370,850 @@ public void testIndexesVectorValues() { assertQueryReturned("coll/doc4", "coll/doc3"); } + @Test + public void testIndexesBsonObjectId() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); + configureFieldIndexes(singletonList(index)); + + writeMutation( + setMutation("coll/doc1", map("key", FieldValue.bsonObjectId("507f191e810c19729de860ea")))); + writeMutation( + setMutation("coll/doc2", map("key", FieldValue.bsonObjectId("507f191e810c19729de860eb")))); + writeMutation( + setMutation("coll/doc3", map("key", FieldValue.bsonObjectId("507f191e810c19729de860ec")))); + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3"); + + query = + query("coll") + .filter(filter("key", "==", FieldValue.bsonObjectId("507f191e810c19729de860ea"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + assertOverlayTypes(keyMap("coll/doc1", CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1"); + + query = + query("coll") + .filter(filter("key", "!=", FieldValue.bsonObjectId("507f191e810c19729de860ea"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3"); + + query = + query("coll") + .filter(filter("key", ">=", FieldValue.bsonObjectId("507f191e810c19729de860eb"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3"); + + query = + query("coll") + .filter(filter("key", "<=", FieldValue.bsonObjectId("507f191e810c19729de860eb"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2"); + + query = + query("coll") + .filter(filter("key", ">", FieldValue.bsonObjectId("507f191e810c19729de860ec"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = + query("coll") + .filter(filter("key", "<", FieldValue.bsonObjectId("507f191e810c19729de860ea"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = + query("coll") + .filter( + filter( + "key", + "in", + Arrays.asList( + 
FieldValue.bsonObjectId("507f191e810c19729de860ea"), + FieldValue.bsonObjectId("507f191e810c19729de860eb")))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2"); + } + + @Test + public void testIndexesBsonTimestamp() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); + configureFieldIndexes(singletonList(index)); + + writeMutation(setMutation("coll/doc1", map("key", FieldValue.bsonTimestamp(1000, 1000)))); + writeMutation(setMutation("coll/doc2", map("key", FieldValue.bsonTimestamp(1001, 1000)))); + writeMutation(setMutation("coll/doc3", map("key", FieldValue.bsonTimestamp(1000, 1001)))); + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc3", "coll/doc2"); + + query = query("coll").filter(filter("key", "==", FieldValue.bsonTimestamp(1000, 1000))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + assertOverlayTypes(keyMap("coll/doc1", CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1"); + + query = query("coll").filter(filter("key", "!=", FieldValue.bsonTimestamp(1000, 1000))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc3", "coll/doc2"); + + query = query("coll").filter(filter("key", ">=", FieldValue.bsonTimestamp(1000, 1001))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc3", "coll/doc2"); + + query = query("coll").filter(filter("key", "<=", FieldValue.bsonTimestamp(1000, 1001))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc3"); + + query = query("coll").filter(filter("key", ">", FieldValue.bsonTimestamp(1001, 1000))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = query("coll").filter(filter("key", "<", FieldValue.bsonTimestamp(1000, 1000))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = + query("coll") + .filter( + filter( + "key", + "in", + Arrays.asList( + FieldValue.bsonTimestamp(1000, 1000), + FieldValue.bsonTimestamp(1000, 1001)))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + 
CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc3"); + } + + @Test + public void testIndexesBsonBinary() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); + configureFieldIndexes(singletonList(index)); + + writeMutation( + setMutation("coll/doc1", map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3})))); + writeMutation( + setMutation("coll/doc2", map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2})))); + writeMutation( + setMutation("coll/doc3", map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 4})))); + writeMutation( + setMutation("coll/doc4", map("key", FieldValue.bsonBinaryData(2, new byte[] {1, 2})))); + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 4, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set, + "coll/doc4", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc1", "coll/doc3", "coll/doc4"); + + query = + query("coll") + .filter(filter("key", "==", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + assertOverlayTypes(keyMap("coll/doc1", CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1"); + + query = + query("coll") + .filter(filter("key", "!=", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set, + "coll/doc4", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3", "coll/doc4"); + + query = + query("coll") + .filter(filter("key", ">=", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set, + "coll/doc4", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc3", "coll/doc4"); + + query = + query("coll") + .filter(filter("key", "<=", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc1"); + + query = + query("coll").filter(filter("key", ">", FieldValue.bsonBinaryData(2, new byte[] {1, 2}))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = + query("coll").filter(filter("key", "<", FieldValue.bsonBinaryData(1, new byte[] {1, 2}))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = + query("coll") + .filter( + filter( + "key", + "in", + Arrays.asList( + FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}), + FieldValue.bsonBinaryData(1, new byte[] {1, 2})))); + executeQuery(query); + 
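(Editor's note, illustrative sketch only; not part of this patch.) The ascending order asserted earlier in this test (doc2, doc1, doc3, doc4) follows from the stored form of a BSON binary value: a single subtype byte followed by the payload, compared as unsigned bytes, so every subtype-1 value sorts before a subtype-2 value regardless of payload. A minimal sketch; the class name and helper are invented:

import com.google.protobuf.ByteString;

final class BsonBinaryOrderSketch {
  // The stored bytes are the one-byte subtype followed by the payload.
  static ByteString stored(int subtype, byte[] data) {
    byte[] out = new byte[data.length + 1];
    out[0] = (byte) subtype;
    System.arraycopy(data, 0, out, 1, data.length);
    return ByteString.copyFrom(out);
  }

  public static void main(String[] args) {
    ByteString subtype1 = stored(1, new byte[] {1, 2, 4});
    ByteString subtype2 = stored(2, new byte[] {1, 2});
    int cmp = ByteString.unsignedLexicographicalComparator().compare(subtype1, subtype2);
    System.out.println(cmp < 0); // true: the subtype byte is compared first
  }
}
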
assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2"); + } + + @Test + public void testIndexesRegex() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); + configureFieldIndexes(singletonList(index)); + + writeMutation(setMutation("coll/doc1", map("key", FieldValue.regex("^bar", "i")))); + writeMutation(setMutation("coll/doc2", map("key", FieldValue.regex("^bar", "m")))); + writeMutation(setMutation("coll/doc3", map("key", FieldValue.regex("^foo", "i")))); + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "==", FieldValue.regex("^bar", "i"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + assertOverlayTypes(keyMap("coll/doc1", CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1"); + + query = query("coll").filter(filter("key", "!=", FieldValue.regex("^bar", "i"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", ">", FieldValue.regex("^foo", "i"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = query("coll").filter(filter("key", "<", FieldValue.regex("^bar", "i"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = + query("coll") + .filter( + filter( + "key", + "in", + Arrays.asList(FieldValue.regex("^bar", "i"), FieldValue.regex("^foo", "i")))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc3"); + } + + @Test + public void testIndexesInt32() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); + configureFieldIndexes(singletonList(index)); + writeMutation(setMutation("coll/doc1", map("key", FieldValue.int32(-1)))); + writeMutation(setMutation("coll/doc2", map("key", FieldValue.int32(0)))); + writeMutation(setMutation("coll/doc3", map("key", FieldValue.int32(1)))); + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3"); + + query = 
query("coll").filter(filter("key", "==", FieldValue.int32(-1))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + assertOverlayTypes(keyMap("coll/doc1", CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1"); + + query = query("coll").filter(filter("key", "!=", FieldValue.int32(-1))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", ">=", FieldValue.int32(0))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "<=", FieldValue.int32(0))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", ">", FieldValue.int32(1))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = query("coll").filter(filter("key", "<", FieldValue.int32(-1))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = + query("coll") + .filter(filter("key", "in", Arrays.asList(FieldValue.int32(-1), FieldValue.int32(0)))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2"); + } + + @Test + public void testIndexesMinKey() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); + configureFieldIndexes(singletonList(index)); + + writeMutation(setMutation("coll/doc1", map("key", null))); + writeMutation(setMutation("coll/doc2", map("key", FieldValue.minKey()))); + writeMutation(setMutation("coll/doc3", map("key", FieldValue.minKey()))); + writeMutation(setMutation("coll/doc4", map("key", 1))); + writeMutation(setMutation("coll/doc5", map("key", FieldValue.maxKey()))); + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 5, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set, + "coll/doc4", + CountingQueryEngine.OverlayType.Set, + "coll/doc5", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3", "coll/doc4", "coll/doc5"); + + query = query("coll").filter(filter("key", "==", FieldValue.minKey())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + 
CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "!=", FieldValue.minKey())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc4", + CountingQueryEngine.OverlayType.Set, + "coll/doc5", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc4", "coll/doc5"); + + query = query("coll").filter(filter("key", ">=", FieldValue.minKey())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "<=", FieldValue.minKey())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", ">", FieldValue.minKey())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = query("coll").filter(filter("key", "<", FieldValue.minKey())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = + query("coll") + .filter(filter("key", "in", Arrays.asList(FieldValue.minKey(), FieldValue.maxKey()))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set, + "coll/doc5", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3", "coll/doc5"); + } + + @Test + public void testIndexesMaxKey() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); + configureFieldIndexes(singletonList(index)); + + writeMutation(setMutation("coll/doc1", map("key", null))); + writeMutation(setMutation("coll/doc2", map("key", FieldValue.minKey()))); + writeMutation(setMutation("coll/doc3", map("key", 1))); + writeMutation(setMutation("coll/doc4", map("key", FieldValue.maxKey()))); + writeMutation(setMutation("coll/doc5", map("key", FieldValue.maxKey()))); + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 5, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set, + "coll/doc4", + CountingQueryEngine.OverlayType.Set, + "coll/doc5", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3", "coll/doc4", "coll/doc5"); + + query = query("coll").filter(filter("key", "==", FieldValue.maxKey())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc4", + CountingQueryEngine.OverlayType.Set, + "coll/doc5", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc4", "coll/doc5"); + + query = query("coll").filter(filter("key", 
"!=", FieldValue.maxKey())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", ">=", FieldValue.maxKey())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc4", + CountingQueryEngine.OverlayType.Set, + "coll/doc5", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc4", "coll/doc5"); + + query = query("coll").filter(filter("key", "<=", FieldValue.maxKey())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc4", + CountingQueryEngine.OverlayType.Set, + "coll/doc5", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc4", "coll/doc5"); + + query = query("coll").filter(filter("key", ">", FieldValue.maxKey())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = query("coll").filter(filter("key", "<", FieldValue.maxKey())); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + } + + @Test + public void testIndexesAllBsonTypesTogether() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.DESCENDING); + configureFieldIndexes(singletonList(index)); + + writeMutation(setMutation("coll/doc1", map("key", FieldValue.minKey()))); + writeMutation(setMutation("coll/doc2", map("key", FieldValue.int32(2)))); + writeMutation(setMutation("coll/doc3", map("key", FieldValue.int32(1)))); + writeMutation(setMutation("coll/doc4", map("key", FieldValue.bsonTimestamp(1000, 1001)))); + writeMutation(setMutation("coll/doc5", map("key", FieldValue.bsonTimestamp(1000, 1000)))); + writeMutation( + setMutation("coll/doc6", map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 4})))); + writeMutation( + setMutation("coll/doc7", map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3})))); + writeMutation( + setMutation("coll/doc8", map("key", FieldValue.bsonObjectId("507f191e810c19729de860eb")))); + writeMutation( + setMutation("coll/doc9", map("key", FieldValue.bsonObjectId("507f191e810c19729de860ea")))); + writeMutation(setMutation("coll/doc10", map("key", FieldValue.regex("^bar", "m")))); + writeMutation(setMutation("coll/doc11", map("key", FieldValue.regex("^bar", "i")))); + writeMutation(setMutation("coll/doc12", map("key", FieldValue.maxKey()))); + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "desc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 12, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set, + "coll/doc4", + CountingQueryEngine.OverlayType.Set, + "coll/doc5", + CountingQueryEngine.OverlayType.Set, + "coll/doc6", + CountingQueryEngine.OverlayType.Set, + "coll/doc7", + CountingQueryEngine.OverlayType.Set, + "coll/doc8", + CountingQueryEngine.OverlayType.Set, + "coll/doc9", + CountingQueryEngine.OverlayType.Set, + "coll/doc10", + CountingQueryEngine.OverlayType.Set, + "coll/doc11", + 
CountingQueryEngine.OverlayType.Set, + "coll/doc12", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned( + "coll/doc12", + "coll/doc10", + "coll/doc11", + "coll/doc8", + "coll/doc9", + "coll/doc6", + "coll/doc7", + "coll/doc4", + "coll/doc5", + "coll/doc2", + "coll/doc3", + "coll/doc1"); + } + + @Test + public void testIndexesAllTypesTogether() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); + configureFieldIndexes(singletonList(index)); + + writeMutation(setMutation("coll/doc1", map("key", null))); + writeMutation(setMutation("coll/doc2", map("key", FieldValue.minKey()))); + writeMutation(setMutation("coll/doc3", map("key", true))); + writeMutation(setMutation("coll/doc4", map("key", Double.NaN))); + writeMutation(setMutation("coll/doc5", map("key", FieldValue.int32(1)))); + writeMutation(setMutation("coll/doc6", map("key", 2.0))); + writeMutation(setMutation("coll/doc7", map("key", 3))); + writeMutation(setMutation("coll/doc8", map("key", new Timestamp(100, 123456000)))); + writeMutation(setMutation("coll/doc9", map("key", FieldValue.bsonTimestamp(1, 2)))); + writeMutation(setMutation("coll/doc10", map("key", "string"))); + writeMutation(setMutation("coll/doc11", map("key", blob(1, 2, 3)))); + writeMutation( + setMutation("coll/doc12", map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3})))); + writeMutation(setMutation("coll/doc13", map("key", ref("foo/bar")))); + writeMutation( + setMutation("coll/doc14", map("key", FieldValue.bsonObjectId("507f191e810c19729de860ea")))); + writeMutation(setMutation("coll/doc15", map("key", new GeoPoint(1, 2)))); + writeMutation(setMutation("coll/doc16", map("key", FieldValue.regex("^bar", "m")))); + writeMutation(setMutation("coll/doc17", map("key", Arrays.asList(2, "foo")))); + writeMutation(setMutation("coll/doc18", map("key", FieldValue.vector(new double[] {1, 2, 3})))); + writeMutation(setMutation("coll/doc19", map("key", map("bar", 1, "foo", 2)))); + writeMutation(setMutation("coll/doc20", map("key", FieldValue.maxKey()))); + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 20, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set, + "coll/doc4", + CountingQueryEngine.OverlayType.Set, + "coll/doc5", + CountingQueryEngine.OverlayType.Set, + "coll/doc6", + CountingQueryEngine.OverlayType.Set, + "coll/doc7", + CountingQueryEngine.OverlayType.Set, + "coll/doc8", + CountingQueryEngine.OverlayType.Set, + "coll/doc9", + CountingQueryEngine.OverlayType.Set, + "coll/doc10", + CountingQueryEngine.OverlayType.Set, + "coll/doc11", + CountingQueryEngine.OverlayType.Set, + "coll/doc12", + CountingQueryEngine.OverlayType.Set, + "coll/doc13", + CountingQueryEngine.OverlayType.Set, + "coll/doc14", + CountingQueryEngine.OverlayType.Set, + "coll/doc15", + CountingQueryEngine.OverlayType.Set, + "coll/doc16", + CountingQueryEngine.OverlayType.Set, + "coll/doc17", + CountingQueryEngine.OverlayType.Set, + "coll/doc18", + CountingQueryEngine.OverlayType.Set, + "coll/doc19", + CountingQueryEngine.OverlayType.Set, + "coll/doc20", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned( + "coll/doc1", + "coll/doc2", + "coll/doc3", + "coll/doc4", + "coll/doc5", + "coll/doc6", + "coll/doc7", + "coll/doc8", + "coll/doc9", + 
"coll/doc10", + "coll/doc11", + "coll/doc12", + "coll/doc13", + "coll/doc14", + "coll/doc15", + "coll/doc16", + "coll/doc17", + "coll/doc18", + "coll/doc19", + "coll/doc20"); + } + @Test public void testIndexesServerTimestamps() { FieldIndex index = @@ -493,7 +1340,7 @@ public void testCanAutoCreateIndexesWorksWithOrQuery() { // Full matched index should be created. executeQuery(query); assertRemoteDocumentsRead(/* byKey= */ 0, /* byCollection= */ 2); - assertQueryReturned("coll/e", "coll/a"); + assertQueryReturned("coll/a", "coll/e"); backfillIndexes(); @@ -501,7 +1348,7 @@ public void testCanAutoCreateIndexesWorksWithOrQuery() { executeQuery(query); assertRemoteDocumentsRead(/* byKey= */ 2, /* byCollection= */ 1); - assertQueryReturned("coll/f", "coll/e", "coll/a"); + assertQueryReturned("coll/a", "coll/e", "coll/f"); } @Test @@ -521,7 +1368,7 @@ public void testDoesNotAutoCreateIndexesForSmallCollections() { // SDK will not create indexes since collection size is too small. executeQuery(query); assertRemoteDocumentsRead(/* byKey= */ 0, /* byCollection= */ 2); - assertQueryReturned("coll/a", "coll/e"); + assertQueryReturned("coll/e", "coll/a"); backfillIndexes(); @@ -529,7 +1376,7 @@ public void testDoesNotAutoCreateIndexesForSmallCollections() { executeQuery(query); assertRemoteDocumentsRead(/* byKey= */ 0, /* byCollection= */ 3); - assertQueryReturned("coll/a", "coll/e", "coll/f"); + assertQueryReturned("coll/e", "coll/f", "coll/a"); } @Test @@ -598,7 +1445,7 @@ public void testIndexAutoCreationWorksWhenBackfillerRunsHalfway() { executeQuery(query); assertRemoteDocumentsRead(/* byKey= */ 1, /* byCollection= */ 2); - assertQueryReturned("coll/a", "coll/e", "coll/f"); + assertQueryReturned("coll/a", "coll/f", "coll/e"); } @Test @@ -621,7 +1468,7 @@ public void testIndexCreatedByIndexAutoCreationExistsAfterTurnOffAutoCreation() // Full matched index should be created. 
executeQuery(query); assertRemoteDocumentsRead(/* byKey= */ 0, /* byCollection= */ 2); - assertQueryReturned("coll/a", "coll/e"); + assertQueryReturned("coll/e", "coll/a"); setIndexAutoCreationEnabled(false); @@ -631,7 +1478,7 @@ public void testIndexCreatedByIndexAutoCreationExistsAfterTurnOffAutoCreation() executeQuery(query); assertRemoteDocumentsRead(/* byKey= */ 2, /* byCollection= */ 1); - assertQueryReturned("coll/a", "coll/e", "coll/f"); + assertQueryReturned("coll/e", "coll/a", "coll/f"); } @Test diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/model/ValuesTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/model/ValuesTest.java index 400e34481dd..f4d83b2be91 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/model/ValuesTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/model/ValuesTest.java @@ -23,6 +23,8 @@ import static com.google.firebase.firestore.testutil.TestUtil.ref; import static com.google.firebase.firestore.testutil.TestUtil.wrapRef; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; import com.google.common.testing.EqualsTester; import com.google.firebase.Timestamp; @@ -35,6 +37,7 @@ import com.google.firebase.firestore.MaxKey; import com.google.firebase.firestore.MinKey; import com.google.firebase.firestore.RegexValue; +import com.google.firebase.firestore.model.Values.MapRepresentation; import com.google.firebase.firestore.testutil.ComparatorTester; import com.google.firebase.firestore.testutil.TestUtil; import com.google.firestore.v1.Value; @@ -314,17 +317,23 @@ public void testValueOrdering() { @Test public void testLowerBound() { - // TODO(mila/BSON): add new bson types new ComparatorTester() - // null first + // lower bound of null is null .addEqualityGroup(wrap(getLowerBound(TestUtil.wrap((Object) null))), wrap((Object) null)) + // lower bound of MinKey is MinKey + .addEqualityGroup( + wrap(getLowerBound(TestUtil.wrap(FieldValue.minKey()))), wrap(FieldValue.minKey())) + // booleans .addEqualityGroup(wrap(false), wrap(getLowerBound(TestUtil.wrap(true)))) .addEqualityGroup(wrap(true)) // numbers - .addEqualityGroup(wrap(getLowerBound(TestUtil.wrap(1.0))), wrap(Double.NaN)) + .addEqualityGroup( + wrap(getLowerBound(TestUtil.wrap(1.0))), + wrap(Double.NaN), + wrap(getLowerBound(TestUtil.wrap(FieldValue.int32(1))))) .addEqualityGroup(wrap(Double.NEGATIVE_INFINITY)) .addEqualityGroup(wrap(Long.MIN_VALUE)) @@ -332,6 +341,12 @@ public void testLowerBound() { .addEqualityGroup(wrap(getLowerBound(TestUtil.wrap(date1)))) .addEqualityGroup(wrap(date1)) + // bson timestamps + .addEqualityGroup( + wrap(getLowerBound(TestUtil.wrap(new BsonTimestamp(4294967295L, 4294967295L)))), + wrap(new BsonTimestamp(0, 0))) + .addEqualityGroup(wrap(new BsonTimestamp(1, 1))) + // strings .addEqualityGroup(wrap(getLowerBound(TestUtil.wrap("foo"))), wrap("")) .addEqualityGroup(wrap("\000")) @@ -340,17 +355,35 @@ public void testLowerBound() { .addEqualityGroup(wrap(getLowerBound(TestUtil.wrap(blob(1, 2, 3)))), wrap(blob())) .addEqualityGroup(wrap(blob(0))) + // bson binary data + .addEqualityGroup( + wrap(getLowerBound(TestUtil.wrap(BsonBinaryData.fromBytes(128, new byte[] {1, 2, 3})))), + wrap(BsonBinaryData.fromBytes(0, new byte[] {})), + wrap(BsonBinaryData.fromByteString((byte) 0, ByteString.EMPTY))) + .addEqualityGroup(wrap(BsonBinaryData.fromBytes(0, new byte[] {0}))) + // resource names 
.addEqualityGroup( wrap(getLowerBound(wrapRef(dbId("foo", "bar"), key("x/y")))), wrap(wrapRef(dbId("", ""), key("")))) .addEqualityGroup(wrap(wrapRef(dbId("", ""), key("a/a")))) + // bson object ids + .addEqualityGroup( + wrap(getLowerBound(TestUtil.wrap(new BsonObjectId("zzz")))), wrap(new BsonObjectId(""))) + .addEqualityGroup(wrap(new BsonObjectId("a"))) + // geo points .addEqualityGroup( wrap(getLowerBound(TestUtil.wrap(new GeoPoint(-90, 0)))), wrap(new GeoPoint(-90, -180))) .addEqualityGroup(wrap(new GeoPoint(-90, 0))) + // regular expressions + .addEqualityGroup( + wrap(getLowerBound(TestUtil.wrap(FieldValue.regex("^foo", "i")))), + wrap(FieldValue.regex("", ""))) + .addEqualityGroup(wrap(FieldValue.regex("^foo", "i"))) + // arrays .addEqualityGroup( wrap(getLowerBound(TestUtil.wrap(Collections.singletonList(false)))), @@ -369,22 +402,33 @@ public void testLowerBound() { // objects .addEqualityGroup(wrap(getLowerBound(TestUtil.wrap(map("foo", "bar")))), wrap(map())) + + // maxKey + .addEqualityGroup(wrap(FieldValue.maxKey())) .testCompare(); } @Test public void testUpperBound() { - // TODO(mila/BSON): add new bson types new ComparatorTester() // null first .addEqualityGroup(wrap((Object) null)) - .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap((Object) null)))) + + // upper value of null is MinKey + .addEqualityGroup( + wrap(getUpperBound(TestUtil.wrap((Object) null))), + wrap(FieldValue.minKey()), + wrap(MinKey.instance())) + + // upper value of MinKey is boolean `false` + .addEqualityGroup(wrap(false), wrap(getUpperBound(TestUtil.wrap(FieldValue.minKey())))) // booleans .addEqualityGroup(wrap(true)) .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(false)))) // numbers + .addEqualityGroup(wrap(FieldValue.int32(2147483647))) // largest int32 value .addEqualityGroup(wrap(Long.MAX_VALUE)) .addEqualityGroup(wrap(Double.POSITIVE_INFINITY)) .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(1.0)))) @@ -393,6 +437,11 @@ public void testUpperBound() { .addEqualityGroup(wrap(date1)) .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(date1)))) + // bson timestamps + .addEqualityGroup( + wrap(new BsonTimestamp(4294967295L, 4294967295L))) // largest bson timestamp value + .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(new BsonTimestamp(1, 1))))) + // strings .addEqualityGroup(wrap("\000")) .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap("\000")))) @@ -401,14 +450,27 @@ public void testUpperBound() { .addEqualityGroup(wrap(blob(255))) .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(blob(255))))) + // bson binary data + .addEqualityGroup(wrap(BsonBinaryData.fromBytes(128, new byte[] {1, 2}))) + .addEqualityGroup( + wrap(getUpperBound(TestUtil.wrap(BsonBinaryData.fromBytes(0, new byte[] {}))))) + // resource names .addEqualityGroup(wrap(wrapRef(dbId("", ""), key("a/a")))) .addEqualityGroup(wrap(getUpperBound(wrapRef(dbId("", ""), key("a/a"))))) + // bson object ids + .addEqualityGroup(wrap(new BsonObjectId("zzz"))) + .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(new BsonObjectId("a"))))) + // geo points .addEqualityGroup(wrap(new GeoPoint(90, 180))) .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(new GeoPoint(90, 180))))) + // regular expressions + .addEqualityGroup(wrap(FieldValue.regex("^foo", "i"))) + .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(FieldValue.regex("", ""))))) + // arrays .addEqualityGroup(wrap(Collections.singletonList(false))) .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(Collections.singletonList(false))))) @@ -424,7 +486,12 @@ public void 
testUpperBound() { // objects .addEqualityGroup(wrap(map("a", "b"))) - .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(map("a", "b"))))) + + // upper value of objects is MaxKey + .addEqualityGroup( + wrap(getUpperBound(TestUtil.wrap(map("a", "b")))), + wrap(FieldValue.maxKey()), + wrap(MaxKey.instance())) .testCompare(); } @@ -473,6 +540,94 @@ private void assertCanonicalId(Value proto, String expectedCanonicalId) { assertEquals(expectedCanonicalId, Values.canonicalId(proto)); } + @Test + public void DetectsBsonTypesCorrectly() { + Value minKeyValue = TestUtil.wrap(FieldValue.minKey()); + Value maxKeyValue = TestUtil.wrap(FieldValue.maxKey()); + Value int32Value = TestUtil.wrap(FieldValue.int32(1)); + Value regexValue = TestUtil.wrap(FieldValue.regex("^foo", "i")); + Value bsonTimestampValue = TestUtil.wrap(FieldValue.bsonTimestamp(1, 2)); + Value bsonObjectIdValue = TestUtil.wrap(FieldValue.bsonObjectId("foo")); + Value bsonBinaryDataValue1 = TestUtil.wrap(FieldValue.bsonBinaryData(1, new byte[] {})); + Value bsonBinaryDataValue2 = TestUtil.wrap(FieldValue.bsonBinaryData(1, new byte[] {1, 2, 4})); + + assertTrue(Values.isMinKey(minKeyValue.getMapValue().getFieldsMap())); + assertFalse(Values.isMinKey(maxKeyValue.getMapValue().getFieldsMap())); + assertFalse(Values.isMinKey(int32Value.getMapValue().getFieldsMap())); + assertFalse(Values.isMinKey(regexValue.getMapValue().getFieldsMap())); + assertFalse(Values.isMinKey(bsonTimestampValue.getMapValue().getFieldsMap())); + assertFalse(Values.isMinKey(bsonObjectIdValue.getMapValue().getFieldsMap())); + assertFalse(Values.isMinKey(bsonBinaryDataValue1.getMapValue().getFieldsMap())); + assertFalse(Values.isMinKey(bsonBinaryDataValue2.getMapValue().getFieldsMap())); + + assertFalse(Values.isMaxKey(minKeyValue.getMapValue().getFieldsMap())); + assertTrue(Values.isMaxKey(maxKeyValue.getMapValue().getFieldsMap())); + assertFalse(Values.isMaxKey(int32Value.getMapValue().getFieldsMap())); + assertFalse(Values.isMaxKey(regexValue.getMapValue().getFieldsMap())); + assertFalse(Values.isMaxKey(bsonTimestampValue.getMapValue().getFieldsMap())); + assertFalse(Values.isMaxKey(bsonObjectIdValue.getMapValue().getFieldsMap())); + assertFalse(Values.isMaxKey(bsonBinaryDataValue1.getMapValue().getFieldsMap())); + assertFalse(Values.isMaxKey(bsonBinaryDataValue2.getMapValue().getFieldsMap())); + + assertFalse(Values.isInt32Value(minKeyValue.getMapValue().getFieldsMap())); + assertFalse(Values.isInt32Value(maxKeyValue.getMapValue().getFieldsMap())); + assertTrue(Values.isInt32Value(int32Value.getMapValue().getFieldsMap())); + assertFalse(Values.isInt32Value(regexValue.getMapValue().getFieldsMap())); + assertFalse(Values.isInt32Value(bsonTimestampValue.getMapValue().getFieldsMap())); + assertFalse(Values.isInt32Value(bsonObjectIdValue.getMapValue().getFieldsMap())); + assertFalse(Values.isInt32Value(bsonBinaryDataValue1.getMapValue().getFieldsMap())); + assertFalse(Values.isInt32Value(bsonBinaryDataValue2.getMapValue().getFieldsMap())); + + assertFalse(Values.isRegexValue(minKeyValue.getMapValue().getFieldsMap())); + assertFalse(Values.isRegexValue(maxKeyValue.getMapValue().getFieldsMap())); + assertFalse(Values.isRegexValue(int32Value.getMapValue().getFieldsMap())); + assertTrue(Values.isRegexValue(regexValue.getMapValue().getFieldsMap())); + assertFalse(Values.isRegexValue(bsonTimestampValue.getMapValue().getFieldsMap())); + assertFalse(Values.isRegexValue(bsonObjectIdValue.getMapValue().getFieldsMap())); + 
assertFalse(Values.isRegexValue(bsonBinaryDataValue1.getMapValue().getFieldsMap())); + assertFalse(Values.isRegexValue(bsonBinaryDataValue2.getMapValue().getFieldsMap())); + + assertFalse(Values.isBsonTimestamp(minKeyValue.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonTimestamp(maxKeyValue.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonTimestamp(int32Value.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonTimestamp(regexValue.getMapValue().getFieldsMap())); + assertTrue(Values.isBsonTimestamp(bsonTimestampValue.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonTimestamp(bsonObjectIdValue.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonTimestamp(bsonBinaryDataValue1.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonTimestamp(bsonBinaryDataValue2.getMapValue().getFieldsMap())); + + assertFalse(Values.isBsonObjectId(minKeyValue.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonObjectId(maxKeyValue.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonObjectId(int32Value.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonObjectId(regexValue.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonObjectId(bsonTimestampValue.getMapValue().getFieldsMap())); + assertTrue(Values.isBsonObjectId(bsonObjectIdValue.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonObjectId(bsonBinaryDataValue1.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonObjectId(bsonBinaryDataValue2.getMapValue().getFieldsMap())); + + assertFalse(Values.isBsonBinaryData(minKeyValue.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonBinaryData(maxKeyValue.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonBinaryData(int32Value.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonBinaryData(regexValue.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonBinaryData(bsonTimestampValue.getMapValue().getFieldsMap())); + assertFalse(Values.isBsonBinaryData(bsonObjectIdValue.getMapValue().getFieldsMap())); + assertTrue(Values.isBsonBinaryData(bsonBinaryDataValue1.getMapValue().getFieldsMap())); + assertTrue(Values.isBsonBinaryData(bsonBinaryDataValue2.getMapValue().getFieldsMap())); + + assertEquals(Values.detectMapRepresentation(minKeyValue), MapRepresentation.MIN_KEY); + assertEquals(Values.detectMapRepresentation(maxKeyValue), MapRepresentation.MAX_KEY); + assertEquals(Values.detectMapRepresentation(int32Value), MapRepresentation.INT32); + assertEquals(Values.detectMapRepresentation(regexValue), MapRepresentation.REGEX); + assertEquals( + Values.detectMapRepresentation(bsonTimestampValue), MapRepresentation.BSON_TIMESTAMP); + assertEquals( + Values.detectMapRepresentation(bsonObjectIdValue), MapRepresentation.BSON_OBJECT_ID); + assertEquals( + Values.detectMapRepresentation(bsonBinaryDataValue1), MapRepresentation.BSON_BINARY); + assertEquals( + Values.detectMapRepresentation(bsonBinaryDataValue2), MapRepresentation.BSON_BINARY); + } + /** Small helper class that uses ProtoValues for equals() and compareTo(). 
*/ static class EqualsWrapper implements Comparable { final Value proto; From ef4e5ea7ddd5353d516ddf53d5345e7f24ab240a Mon Sep 17 00:00:00 2001 From: Mila <107142260+milaGGL@users.noreply.github.com> Date: Thu, 1 May 2025 12:43:13 -0400 Subject: [PATCH 3/8] Remove FieldValue constructor methods (#578) --- firebase-firestore/api.txt | 7 - .../firebase/firestore/BsonTypesTest.java | 272 +++++++++--------- .../google/firebase/firestore/POJOTest.java | 14 +- .../google/firebase/firestore/TypeTest.java | 56 ++-- .../google/firebase/firestore/FieldValue.java | 84 +----- .../firebase/firestore/UserDataWriter.java | 4 +- .../firebase/firestore/BsonTypesTest.java | 18 +- .../firebase/firestore/FieldValueTest.java | 28 +- .../firestore/UserDataWriterTest.java | 14 +- .../firebase/firestore/core/QueryTest.java | 22 +- .../index/FirestoreIndexValueWriterTest.java | 24 +- .../local/SQLiteIndexManagerTest.java | 212 +++++++------- .../firestore/local/SQLiteLocalStoreTest.java | 199 +++++++------ .../firebase/firestore/model/ValuesTest.java | 118 ++++---- .../remote/RemoteSerializerTest.java | 20 +- 15 files changed, 490 insertions(+), 602 deletions(-) diff --git a/firebase-firestore/api.txt b/firebase-firestore/api.txt index 8ae38dc264b..f14627e2905 100644 --- a/firebase-firestore/api.txt +++ b/firebase-firestore/api.txt @@ -175,16 +175,9 @@ package com.google.firebase.firestore { public abstract class FieldValue { method public static com.google.firebase.firestore.FieldValue arrayRemove(java.lang.Object!...!); method public static com.google.firebase.firestore.FieldValue arrayUnion(java.lang.Object!...!); - method public static com.google.firebase.firestore.BsonBinaryData bsonBinaryData(int, byte[]); - method public static com.google.firebase.firestore.BsonObjectId bsonObjectId(String); - method public static com.google.firebase.firestore.BsonTimestamp bsonTimestamp(long, long); method public static com.google.firebase.firestore.FieldValue delete(); method public static com.google.firebase.firestore.FieldValue increment(double); method public static com.google.firebase.firestore.FieldValue increment(long); - method public static com.google.firebase.firestore.Int32Value int32(int); - method public static com.google.firebase.firestore.MaxKey maxKey(); - method public static com.google.firebase.firestore.MinKey minKey(); - method public static com.google.firebase.firestore.RegexValue regex(String, String); method public static com.google.firebase.firestore.FieldValue serverTimestamp(); method public static com.google.firebase.firestore.VectorValue vector(double[]); } diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java index ecd2e80eacb..2dad1a7a678 100644 --- a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java @@ -49,34 +49,34 @@ public void writeAndReadBsonTypes() throws ExecutionException, InterruptedExcept testCollectionOnNightly() .add( map( - "bsonObjectId", FieldValue.bsonObjectId("507f191e810c19729de860ea"), - "regex", FieldValue.regex("^foo", "i"), - "bsonTimestamp", FieldValue.bsonTimestamp(1, 2), - "bsonBinary", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}), - "int32", FieldValue.int32(1), - "minKey", FieldValue.minKey(), - "maxKey", FieldValue.maxKey()))); + "bsonObjectId", new BsonObjectId("507f191e810c19729de860ea"), 
+ "regex", new RegexValue("^foo", "i"), + "bsonTimestamp", new BsonTimestamp(1, 2), + "bsonBinary", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}), + "int32", new Int32Value(1), + "minKey", MinKey.instance(), + "maxKey", MaxKey.instance()))); waitFor( docRef.set( map( "bsonObjectId", - FieldValue.bsonObjectId("507f191e810c19729de860eb"), + new BsonObjectId("507f191e810c19729de860eb"), "regex", - FieldValue.regex("^foo", "m"), + new RegexValue("^foo", "m"), "bsonTimestamp", - FieldValue.bsonTimestamp(1, 3)), + new BsonTimestamp(1, 3)), SetOptions.merge())); - waitFor(docRef.update(map("int32", FieldValue.int32(2)))); + waitFor(docRef.update(map("int32", new Int32Value(2)))); - expected.put("bsonObjectId", FieldValue.bsonObjectId("507f191e810c19729de860eb")); - expected.put("regex", FieldValue.regex("^foo", "m")); - expected.put("bsonTimestamp", FieldValue.bsonTimestamp(1, 3)); - expected.put("bsonBinary", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3})); - expected.put("int32", FieldValue.int32(2)); - expected.put("minKey", FieldValue.minKey()); - expected.put("maxKey", FieldValue.maxKey()); + expected.put("bsonObjectId", new BsonObjectId("507f191e810c19729de860eb")); + expected.put("regex", new RegexValue("^foo", "m")); + expected.put("bsonTimestamp", new BsonTimestamp(1, 3)); + expected.put("bsonBinary", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})); + expected.put("int32", new Int32Value(2)); + expected.put("minKey", MinKey.instance()); + expected.put("maxKey", MaxKey.instance()); DocumentSnapshot actual = waitFor(docRef.get()); @@ -101,30 +101,30 @@ public void writeAndReadBsonTypeOffline() throws ExecutionException, Interrupted Map expected = new HashMap<>(); docRef.set( map( - "bsonObjectId", FieldValue.bsonObjectId("507f191e810c19729de860ea"), - "regex", FieldValue.regex("^foo", "i"), - "bsonTimestamp", FieldValue.bsonTimestamp(1, 2), - "bsonBinary", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}), - "int32", FieldValue.int32(1), - "minKey", FieldValue.minKey(), - "maxKey", FieldValue.maxKey())); + "bsonObjectId", new BsonObjectId("507f191e810c19729de860ea"), + "regex", new RegexValue("^foo", "i"), + "bsonTimestamp", new BsonTimestamp(1, 2), + "bsonBinary", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}), + "int32", new Int32Value(1), + "minKey", MinKey.instance(), + "maxKey", MaxKey.instance())); docRef.update( map( "bsonObjectId", - FieldValue.bsonObjectId("507f191e810c19729de860eb"), + new BsonObjectId("507f191e810c19729de860eb"), "regex", - FieldValue.regex("^foo", "m"), + new RegexValue("^foo", "m"), "bsonTimestamp", - FieldValue.bsonTimestamp(1, 3))); + new BsonTimestamp(1, 3))); - expected.put("bsonObjectId", FieldValue.bsonObjectId("507f191e810c19729de860eb")); - expected.put("regex", FieldValue.regex("^foo", "m")); - expected.put("bsonTimestamp", FieldValue.bsonTimestamp(1, 3)); - expected.put("bsonBinary", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3})); - expected.put("int32", FieldValue.int32(1)); - expected.put("minKey", FieldValue.minKey()); - expected.put("maxKey", FieldValue.maxKey()); + expected.put("bsonObjectId", new BsonObjectId("507f191e810c19729de860eb")); + expected.put("regex", new RegexValue("^foo", "m")); + expected.put("bsonTimestamp", new BsonTimestamp(1, 3)); + expected.put("bsonBinary", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})); + expected.put("int32", new Int32Value(1)); + expected.put("minKey", MinKey.instance()); + expected.put("maxKey", MaxKey.instance()); DocumentSnapshot actual = waitFor(docRef.get()); @@ -162,44 
+162,49 @@ public void listenToDocumentsWithBsonTypes() throws Throwable { assertNull(docSnap); ref.set( map( - "purpose", "Bson types tests", + "purpose", + "Bson types tests", "bsonObjectId", - FieldValue.bsonObjectId("507f191e810c19729de860ea"), - "regex", FieldValue.regex("^foo", "i"), - "bsonTimestamp", FieldValue.bsonTimestamp(1, 2), - "bsonBinary", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}), - "int32", FieldValue.int32(1), - "minKey", FieldValue.minKey(), - "maxKey", FieldValue.maxKey())); + new BsonObjectId("507f191e810c19729de860ea"), + "regex", + new RegexValue("^foo", "i"), + "bsonTimestamp", + new BsonTimestamp(1, 2), + "bsonBinary", + BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}), + "int32", + new Int32Value(1), + "minKey", + MinKey.instance(), + "maxKey", + MaxKey.instance())); break; case 1: assertNotNull(docSnap); assertEquals( docSnap.getBsonBinaryData("bsonBinary"), - FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3})); + BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})); assertEquals( docSnap.getBsonObjectId("bsonObjectId"), - FieldValue.bsonObjectId("507f191e810c19729de860ea")); + new BsonObjectId("507f191e810c19729de860ea")); + assertEquals(docSnap.getRegexValue("regex"), new RegexValue("^foo", "i")); assertEquals( - docSnap.getRegexValue("regex"), FieldValue.regex("^foo", "i")); - assertEquals( - docSnap.getBsonTimestamp("bsonTimestamp"), - FieldValue.bsonTimestamp(1, 2)); - assertEquals(docSnap.getInt32Value("int32"), FieldValue.int32(1)); - assertEquals(docSnap.getMinKey("minKey"), FieldValue.minKey()); - assertEquals(docSnap.getMaxKey("maxKey"), FieldValue.maxKey()); + docSnap.getBsonTimestamp("bsonTimestamp"), new BsonTimestamp(1, 2)); + assertEquals(docSnap.getInt32Value("int32"), new Int32Value(1)); + assertEquals(docSnap.getMinKey("minKey"), MinKey.instance()); + assertEquals(docSnap.getMaxKey("maxKey"), MaxKey.instance()); ref.set( map( "purpose", "Bson types tests", "bsonObjectId", - FieldValue.bsonObjectId("507f191e810c19729de860eb"), + new BsonObjectId("507f191e810c19729de860eb"), "regex", - FieldValue.regex("^foo", "m"), + new RegexValue("^foo", "m"), "bsonTimestamp", - FieldValue.bsonTimestamp(1, 3)), + new BsonTimestamp(1, 3)), SetOptions.merge()); break; case 2: @@ -207,19 +212,17 @@ public void listenToDocumentsWithBsonTypes() throws Throwable { assertEquals( docSnap.getBsonObjectId("bsonObjectId"), - FieldValue.bsonObjectId("507f191e810c19729de860eb")); - assertEquals( - docSnap.getRegexValue("regex"), FieldValue.regex("^foo", "m")); + new BsonObjectId("507f191e810c19729de860eb")); + assertEquals(docSnap.getRegexValue("regex"), new RegexValue("^foo", "m")); assertEquals( - docSnap.getBsonTimestamp("bsonTimestamp"), - FieldValue.bsonTimestamp(1, 3)); + docSnap.getBsonTimestamp("bsonTimestamp"), new BsonTimestamp(1, 3)); - ref.update(map("int32", FieldValue.int32(2))); + ref.update(map("int32", new Int32Value(2))); break; case 3: assertNotNull(docSnap); - assertEquals(docSnap.getInt32Value("int32"), FieldValue.int32(2)); + assertEquals(docSnap.getInt32Value("int32"), new Int32Value(2)); ref.delete(); break; @@ -254,11 +257,11 @@ public void filterAndOrderBsonObjectIds() throws Exception { Map> docs = map( "a", - map("key", FieldValue.bsonObjectId("507f191e810c19729de860ea")), + map("key", new BsonObjectId("507f191e810c19729de860ea")), "b", - map("key", FieldValue.bsonObjectId("507f191e810c19729de860eb")), + map("key", new BsonObjectId("507f191e810c19729de860eb")), "c", - map("key", FieldValue.bsonObjectId("507f191e810c19729de860ec"))); + 
map("key", new BsonObjectId("507f191e810c19729de860ec"))); CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); // Pre-populate the cache with all docs @@ -267,14 +270,14 @@ public void filterAndOrderBsonObjectIds() throws Exception { Query orderedQuery = randomColl .orderBy("key", Direction.DESCENDING) - .whereGreaterThan("key", FieldValue.bsonObjectId("507f191e810c19729de860ea")); + .whereGreaterThan("key", new BsonObjectId("507f191e810c19729de860ea")); assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "b")); orderedQuery = randomColl .orderBy("key", Direction.DESCENDING) - .whereNotEqualTo("key", FieldValue.bsonObjectId("507f191e810c19729de860eb")); + .whereNotEqualTo("key", new BsonObjectId("507f191e810c19729de860eb")); assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "a")); } @@ -284,11 +287,11 @@ public void filterAndOrderBsonTimestamps() throws Exception { Map> docs = map( "a", - map("key", FieldValue.bsonTimestamp(1, 1)), + map("key", new BsonTimestamp(1, 1)), "b", - map("key", FieldValue.bsonTimestamp(1, 2)), + map("key", new BsonTimestamp(1, 2)), "c", - map("key", FieldValue.bsonTimestamp(2, 1))); + map("key", new BsonTimestamp(2, 1))); CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); // Pre-populate the cache with all docs @@ -297,14 +300,14 @@ public void filterAndOrderBsonTimestamps() throws Exception { Query orderedQuery = randomColl .orderBy("key", Direction.DESCENDING) - .whereGreaterThan("key", FieldValue.bsonTimestamp(1, 1)); + .whereGreaterThan("key", new BsonTimestamp(1, 1)); assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "b")); orderedQuery = randomColl .orderBy("key", Direction.DESCENDING) - .whereNotEqualTo("key", FieldValue.bsonTimestamp(1, 2)); + .whereNotEqualTo("key", new BsonTimestamp(1, 2)); assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "a")); } @@ -314,11 +317,11 @@ public void filterAndOrderBsonBinaryData() throws Exception { Map> docs = map( "a", - map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3})), + map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})), "b", - map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 4})), + map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4})), "c", - map("key", FieldValue.bsonBinaryData(2, new byte[] {1, 2, 2}))); + map("key", BsonBinaryData.fromBytes(2, new byte[] {1, 2, 2}))); CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); // Pre-populate the cache with all docs @@ -327,14 +330,14 @@ public void filterAndOrderBsonBinaryData() throws Exception { Query orderedQuery = randomColl .orderBy("key", Direction.DESCENDING) - .whereGreaterThan("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3})); + .whereGreaterThan("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})); assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "b")); orderedQuery = randomColl .orderBy("key", Direction.DESCENDING) - .whereNotEqualTo("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 4})); + .whereNotEqualTo("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4})); assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "a")); } @@ -343,9 +346,9 @@ public void filterAndOrderBsonBinaryData() throws Exception { public void filterAndOrderRegex() throws Exception { Map> docs = map( - "a", map("key", FieldValue.regex("^bar", "i")), - "b", map("key", 
FieldValue.regex("^bar", "m")), - "c", map("key", FieldValue.regex("^baz", "i"))); + "a", map("key", new RegexValue("^bar", "i")), + "b", map("key", new RegexValue("^bar", "m")), + "c", map("key", new RegexValue("^baz", "i"))); CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); // Pre-populate the cache with all docs @@ -354,14 +357,14 @@ public void filterAndOrderRegex() throws Exception { Query orderedQuery = randomColl .orderBy("key", Direction.DESCENDING) - .whereGreaterThan("key", FieldValue.regex("^bar", "i")); + .whereGreaterThan("key", new RegexValue("^bar", "i")); assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "b")); orderedQuery = randomColl .orderBy("key", Direction.DESCENDING) - .whereNotEqualTo("key", FieldValue.regex("^bar", "m")); + .whereNotEqualTo("key", new RegexValue("^bar", "m")); assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "a")); } @@ -370,23 +373,21 @@ public void filterAndOrderRegex() throws Exception { public void filterAndOrderInt32() throws Exception { Map> docs = map( - "a", map("key", FieldValue.int32(-1)), - "b", map("key", FieldValue.int32(1)), - "c", map("key", FieldValue.int32(2))); + "a", map("key", new Int32Value(-1)), + "b", map("key", new Int32Value(1)), + "c", map("key", new Int32Value(2))); CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); // Pre-populate the cache with all docs waitFor(randomColl.get()); Query orderedQuery = - randomColl - .orderBy("key", Direction.DESCENDING) - .whereGreaterThan("key", FieldValue.int32(-1)); + randomColl.orderBy("key", Direction.DESCENDING).whereGreaterThan("key", new Int32Value(-1)); assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "b")); orderedQuery = - randomColl.orderBy("key", Direction.DESCENDING).whereNotEqualTo("key", FieldValue.int32(1)); + randomColl.orderBy("key", Direction.DESCENDING).whereNotEqualTo("key", new Int32Value(1)); assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "a")); } @@ -395,11 +396,11 @@ public void filterAndOrderInt32() throws Exception { public void filterAndOrderMinKey() throws Exception { Map> docs = map( - "a", map("key", FieldValue.minKey()), - "b", map("key", FieldValue.minKey()), + "a", map("key", MinKey.instance()), + "b", map("key", MinKey.instance()), "c", map("key", null), "d", map("key", 1L), - "e", map("key", FieldValue.maxKey())); + "e", map("key", MaxKey.instance())); CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); // Pre-populate the cache with all docs @@ -411,24 +412,24 @@ public void filterAndOrderMinKey() throws Exception { "key", Direction .DESCENDING) // minKeys are equal, would sort by documentId as secondary order - .whereEqualTo("key", FieldValue.minKey()); + .whereEqualTo("key", MinKey.instance()); assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList("b", "a")); // TODO(Mila/BSON): uncomment this test when null value inclusion is fixed - // query = randomColl.whereNotEqualTo("key", FieldValue.minKey()); + // query = randomColl.whereNotEqualTo("key", MinKey.instance()); // assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList("d", "e")); - query = randomColl.whereGreaterThanOrEqualTo("key", FieldValue.minKey()); + query = randomColl.whereGreaterThanOrEqualTo("key", MinKey.instance()); assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList("a", "b")); - query = randomColl.whereLessThanOrEqualTo("key", 
FieldValue.minKey()); + query = randomColl.whereLessThanOrEqualTo("key", MinKey.instance()); assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList("a", "b")); - query = randomColl.whereGreaterThan("key", FieldValue.minKey()); + query = randomColl.whereGreaterThan("key", MinKey.instance()); assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList()); - query = randomColl.whereGreaterThan("key", FieldValue.minKey()); + query = randomColl.whereGreaterThan("key", MinKey.instance()); assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList()); } @@ -436,10 +437,10 @@ public void filterAndOrderMinKey() throws Exception { public void filterAndOrderMaxKey() throws Exception { Map> docs = map( - "a", map("key", FieldValue.minKey()), + "a", map("key", MinKey.instance()), "b", map("key", 1L), - "c", map("key", FieldValue.maxKey()), - "d", map("key", FieldValue.maxKey()), + "c", map("key", MaxKey.instance()), + "d", map("key", MaxKey.instance()), "e", map("key", null)); CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); @@ -452,24 +453,24 @@ public void filterAndOrderMaxKey() throws Exception { "key", Direction .DESCENDING) // maxKeys are equal, would sort by documentId as secondary order - .whereEqualTo("key", FieldValue.maxKey()); + .whereEqualTo("key", MaxKey.instance()); assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList("d", "c")); // TODO(Mila/BSON): uncomment this test when null value inclusion is fixed - // query = randomColl.whereNotEqualTo("key", FieldValue.maxKey()); + // query = randomColl.whereNotEqualTo("key", MaxKey.instance()); // assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList("a", "b")); - query = randomColl.whereGreaterThanOrEqualTo("key", FieldValue.maxKey()); + query = randomColl.whereGreaterThanOrEqualTo("key", MaxKey.instance()); assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList("c", "d")); - query = randomColl.whereLessThanOrEqualTo("key", FieldValue.maxKey()); + query = randomColl.whereLessThanOrEqualTo("key", MaxKey.instance()); assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList("c", "d")); - query = randomColl.whereLessThan("key", FieldValue.maxKey()); + query = randomColl.whereLessThan("key", MaxKey.instance()); assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList()); - query = randomColl.whereGreaterThan("key", FieldValue.maxKey()); + query = randomColl.whereGreaterThan("key", MaxKey.instance()); assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList()); } @@ -477,11 +478,11 @@ public void filterAndOrderMaxKey() throws Exception { public void filterNullValueWithBsonTypes() throws Exception { Map> docs = map( - "a", map("key", FieldValue.minKey()), + "a", map("key", MinKey.instance()), "b", map("key", null), "c", map("key", null), "d", map("key", 1L), - "e", map("key", FieldValue.maxKey())); + "e", map("key", MaxKey.instance())); CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); // Pre-populate the cache with all docs @@ -499,43 +500,43 @@ public void orderBsonTypesTogether() throws Exception { Map> docs = map( "bsonObjectId1", - map("key", FieldValue.bsonObjectId("507f191e810c19729de860ea")), + map("key", new BsonObjectId("507f191e810c19729de860ea")), "bsonObjectId2", - map("key", FieldValue.bsonObjectId("507f191e810c19729de860eb")), + map("key", new BsonObjectId("507f191e810c19729de860eb")), "bsonObjectId3", - map("key", FieldValue.bsonObjectId("407f191e810c19729de860ea")), + 
map("key", new BsonObjectId("407f191e810c19729de860ea")), "regex1", - map("key", FieldValue.regex("^bar", "m")), + map("key", new RegexValue("^bar", "m")), "regex2", - map("key", FieldValue.regex("^bar", "i")), + map("key", new RegexValue("^bar", "i")), "regex3", - map("key", FieldValue.regex("^baz", "i")), + map("key", new RegexValue("^baz", "i")), "bsonTimestamp1", - map("key", FieldValue.bsonTimestamp(2, 0)), + map("key", new BsonTimestamp(2, 0)), "bsonTimestamp2", - map("key", FieldValue.bsonTimestamp(1, 2)), + map("key", new BsonTimestamp(1, 2)), "bsonTimestamp3", - map("key", FieldValue.bsonTimestamp(1, 1)), + map("key", new BsonTimestamp(1, 1)), "bsonBinary1", - map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3})), + map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})), "bsonBinary2", - map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 4})), + map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4})), "bsonBinary3", - map("key", FieldValue.bsonBinaryData(2, new byte[] {1, 2, 2})), + map("key", BsonBinaryData.fromBytes(2, new byte[] {1, 2, 2})), "int32Value1", - map("key", FieldValue.int32(-1)), + map("key", new Int32Value(-1)), "int32Value2", - map("key", FieldValue.int32(1)), + map("key", new Int32Value(1)), "int32Value3", - map("key", FieldValue.int32(0)), + map("key", new Int32Value(0)), "minKey1", - map("key", FieldValue.minKey()), + map("key", MinKey.instance()), "minKey2", - map("key", FieldValue.minKey()), + map("key", MinKey.instance()), "maxKey1", - map("key", FieldValue.maxKey()), + map("key", MaxKey.instance()), "maxKey2", - map("key", FieldValue.maxKey())); + map("key", MaxKey.instance())); CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); // Pre-populate the cache with all docs @@ -571,9 +572,9 @@ public void orderBsonTypesTogether() throws Exception { public void canRunTransactionsOnDocumentsWithBsonTypes() throws Exception { Map> docs = map( - "a", map("key", FieldValue.bsonObjectId("507f191e810c19729de860ea")), - "b", map("key", FieldValue.regex("^foo", "i")), - "c", map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}))); + "a", map("key", new BsonObjectId("507f191e810c19729de860ea")), + "b", map("key", new RegexValue("^foo", "i")), + "c", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); waitFor( @@ -581,9 +582,8 @@ public void canRunTransactionsOnDocumentsWithBsonTypes() throws Exception { transaction -> { DocumentSnapshot docSnap = transaction.get(randomColl.document("a")); assertEquals( - docSnap.getBsonObjectId("key"), - FieldValue.bsonObjectId("507f191e810c19729de860ea")); - transaction.update(randomColl.document("b"), "key", FieldValue.regex("^bar", "i")); + docSnap.getBsonObjectId("key"), new BsonObjectId("507f191e810c19729de860ea")); + transaction.update(randomColl.document("b"), "key", new RegexValue("^bar", "i")); transaction.delete(randomColl.document("c")); return null; })); @@ -596,8 +596,8 @@ public void canRunTransactionsOnDocumentsWithBsonTypes() throws Exception { assertTrue(getSnapshotDocIds.equals(Arrays.asList("a", "b"))); assertEquals( getSnapshot.getDocuments().get(0).getBsonObjectId("key"), - FieldValue.bsonObjectId("507f191e810c19729de860ea")); + new BsonObjectId("507f191e810c19729de860ea")); assertEquals( - getSnapshot.getDocuments().get(1).getRegexValue("key"), FieldValue.regex("^bar", "i")); + getSnapshot.getDocuments().get(1).getRegexValue("key"), new RegexValue("^bar", "i")); } } diff --git 
a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/POJOTest.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/POJOTest.java index 90d707b089c..52cc1d3892f 100644 --- a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/POJOTest.java +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/POJOTest.java @@ -69,13 +69,13 @@ public POJO(double number, String str, DocumentReference documentReference) { this.timestamp = new Timestamp(123, 123456000); this.blob = Blob.fromBytes(new byte[] {3, 1, 4, 1, 5}); this.geoPoint = new GeoPoint(3.1415, 9.2653); - this.bsonObjectId = FieldValue.bsonObjectId("507f191e810c19729de860ea"); - this.bsonBinaryData = FieldValue.bsonBinaryData(1, new byte[] {3, 1, 4, 1, 5}); - this.bsonTimestamp = FieldValue.bsonTimestamp(1, 2); - this.regexValue = FieldValue.regex("^foo", "i"); - this.int32Value = FieldValue.int32(1); - this.minKey = FieldValue.minKey(); - this.maxKey = FieldValue.maxKey(); + this.bsonObjectId = new BsonObjectId("507f191e810c19729de860ea"); + this.bsonBinaryData = BsonBinaryData.fromBytes(1, new byte[] {3, 1, 4, 1, 5}); + this.bsonTimestamp = new BsonTimestamp(1, 2); + this.regexValue = new RegexValue("^foo", "i"); + this.int32Value = new Int32Value(1); + this.minKey = MinKey.instance(); + this.maxKey = MaxKey.instance(); } public double getNumber() { diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/TypeTest.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/TypeTest.java index f3602c59fdc..2d2fa073599 100644 --- a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/TypeTest.java +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/TypeTest.java @@ -14,8 +14,6 @@ package com.google.firebase.firestore; -import static com.google.firebase.firestore.FieldValue.maxKey; -import static com.google.firebase.firestore.FieldValue.minKey; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.assertSDKQueryResultsConsistentWithBackend; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testCollection; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testCollectionOnNightly; @@ -112,12 +110,12 @@ public void testCanReadAndWriteDocumentReferencesInLists() { @Test public void testCanReadAndWriteMinKey() { - verifySuccessfulWriteReadCycle(map("minKey", minKey()), testDocumentOnNightly()); + verifySuccessfulWriteReadCycle(map("minKey", MinKey.instance()), testDocumentOnNightly()); } @Test public void testCanReadAndWriteMaxKey() { - verifySuccessfulWriteReadCycle(map("maxKey", maxKey()), testDocumentOnNightly()); + verifySuccessfulWriteReadCycle(map("maxKey", MaxKey.instance()), testDocumentOnNightly()); } @Test @@ -162,13 +160,13 @@ public void testCanReadAndWriteBsonBinaryValue() { public void testCanReadAndWriteBsonTypesInLists() { List data = Arrays.asList( - FieldValue.bsonObjectId("507f191e810c19729de860ea"), - FieldValue.regex("^foo", "i"), - FieldValue.bsonTimestamp(1, 2), - FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}), - FieldValue.int32(1), - FieldValue.minKey(), - FieldValue.maxKey()); + new BsonObjectId("507f191e810c19729de860ea"), + new RegexValue("^foo", "i"), + new BsonTimestamp(1, 2), + BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}), + new Int32Value(1), + MinKey.instance(), + MaxKey.instance()); verifySuccessfulWriteReadCycle(map("BsonTypes", data), testDocumentOnNightly()); } @@ -177,13 
+175,13 @@ public void testCanReadAndWriteBsonTypesInLists() { public void testCanReadAndWriteBsonTypesInMaps() { Map data = map( - "bsonObjectId", FieldValue.bsonObjectId("507f191e810c19729de860ea"), - "regex", FieldValue.regex("^foo", "i"), - "bsonTimestamp", FieldValue.bsonTimestamp(1, 2), - "bsonBinary", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}), - "int32", FieldValue.int32(1), - "minKey", FieldValue.minKey(), - "maxKey", FieldValue.maxKey()); + "bsonObjectId", new BsonObjectId("507f191e810c19729de860ea"), + "regex", new RegexValue("^foo", "i"), + "bsonTimestamp", new BsonTimestamp(1, 2), + "bsonBinary", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}), + "int32", new Int32Value(1), + "minKey", MinKey.instance(), + "maxKey", MaxKey.instance()); verifySuccessfulWriteReadCycle(map("BsonTypes", data), testDocumentOnNightly()); } @@ -192,7 +190,7 @@ public void testCanReadAndWriteBsonTypesInMaps() { public void invalidRegexGetsRejected() throws Exception { Exception error = null; try { - waitFor(testDocumentOnNightly().set(map("key", FieldValue.regex("foo", "a")))); + waitFor(testDocumentOnNightly().set(map("key", new RegexValue("foo", "a")))); } catch (Exception e) { error = e; } @@ -209,7 +207,7 @@ public void invalidBsonObjectIdGetsRejected() throws Exception { Exception error = null; try { // bsonObjectId with length not equal to 24 gets rejected - waitFor(testDocumentOnNightly().set(map("key", FieldValue.bsonObjectId("foobar")))); + waitFor(testDocumentOnNightly().set(map("key", new BsonObjectId("foobar")))); } catch (Exception e) { error = e; } @@ -223,7 +221,7 @@ public void invalidBsonBinaryDataGetsRejected() throws Exception { try { waitFor( testDocumentOnNightly() - .set(map("key", FieldValue.bsonBinaryData(1234, new byte[] {1, 2, 3})))); + .set(map("key", BsonBinaryData.fromBytes(1234, new byte[] {1, 2, 3})))); } catch (Exception e) { error = e; } @@ -239,7 +237,7 @@ public void invalidBsonBinaryDataGetsRejected() throws Exception { public void invalidBsonTimestampDataGetsRejected() throws Exception { Exception error = null; try { - waitFor(testDocumentOnNightly().set(map("key", FieldValue.bsonTimestamp(-1, 1)))); + waitFor(testDocumentOnNightly().set(map("key", new BsonTimestamp(-1, 1)))); } catch (Exception e) { error = e; } @@ -251,7 +249,7 @@ public void invalidBsonTimestampDataGetsRejected() throws Exception { "The field 'seconds' value (-1) does not represent an unsigned 32-bit integer.")); try { - waitFor(testDocumentOnNightly().set(map("key", FieldValue.bsonTimestamp(4294967296L, 1)))); + waitFor(testDocumentOnNightly().set(map("key", new BsonTimestamp(4294967296L, 1)))); } catch (Exception e) { error = e; } @@ -263,7 +261,7 @@ public void invalidBsonTimestampDataGetsRejected() throws Exception { "The field 'seconds' value (4294967296) does not represent an unsigned 32-bit integer.")); try { - waitFor(testDocumentOnNightly().set(map("key", FieldValue.bsonTimestamp(1, -1)))); + waitFor(testDocumentOnNightly().set(map("key", new BsonTimestamp(1, -1)))); } catch (Exception e) { error = e; } @@ -275,7 +273,7 @@ public void invalidBsonTimestampDataGetsRejected() throws Exception { "The field 'increment' value (-1) does not represent an unsigned 32-bit integer.")); try { - waitFor(testDocumentOnNightly().set(map("key", FieldValue.bsonTimestamp(1, 4294967296L)))); + waitFor(testDocumentOnNightly().set(map("key", new BsonTimestamp(1, 4294967296L)))); } catch (Exception e) { error = e; } @@ -329,9 +327,9 @@ public void testCanUseTypedAccessors() { "bsonBinary", 
BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}), "minKey", - minKey(), + MinKey.instance(), "maxKey", - maxKey()); + MaxKey.instance()); waitFor(doc.set(data)); DocumentSnapshot snapshot = waitFor(doc.get()); @@ -405,7 +403,7 @@ public void snapshotListenerSortsDifferentTypesSameAsServer() throws Exception { "null", map("value", null), "min", - map("value", FieldValue.minKey()), + map("value", MinKey.instance()), "boolean", map("value", true), "nan", @@ -441,7 +439,7 @@ public void snapshotListenerSortsDifferentTypesSameAsServer() throws Exception { "map", map("value", map("key", true)), "max", - map("value", FieldValue.maxKey())); + map("value", MaxKey.instance())); writeTestDocsOnCollection(colRef, testDocs); diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/FieldValue.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/FieldValue.java index 6c62faf5ada..f899457acdb 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/FieldValue.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/FieldValue.java @@ -48,7 +48,7 @@ String getMethodName() { } } - /* {@code FieldValue} class for {@link #arrayUnion()} transforms. */ + /** {@code FieldValue} class for {@link #arrayUnion()} transforms. */ static class ArrayUnionFieldValue extends FieldValue { private final List elements; @@ -66,7 +66,7 @@ List getElements() { } } - /* {@code FieldValue} class for {@link #arrayRemove()} transforms. */ + /** {@code FieldValue} class for {@link #arrayRemove()} transforms. */ static class ArrayRemoveFieldValue extends FieldValue { private final List elements; @@ -84,7 +84,7 @@ List getElements() { } } - /* {@code FieldValue} class for {@link #increment()} transforms. */ + /** {@code FieldValue} class for {@link #increment()} transforms. */ static class NumericIncrementFieldValue extends FieldValue { private final Number operand; @@ -193,82 +193,4 @@ public static FieldValue increment(double l) { public static VectorValue vector(@NonNull double[] values) { return new VectorValue(values); } - - /** - * Creates a new {@link RegexValue} constructed using the given pattern and options. - * - * @param pattern The pattern to use for the new regular expression. - * @param options The options to use for the new regular expression. - * @return A new {@link RegexValue} constructed using the given pattern and options. - */ - @NonNull - public static RegexValue regex(@NonNull String pattern, @NonNull String options) { - return new RegexValue(pattern, options); - } - - /** - * Creates a new {@link Int32Value} constructed using the given number. - * - * @param value The number to use for constructing the Int32Value object. - * @return A new {@link Int32Value} constructed using the number. - */ - @NonNull - public static Int32Value int32(int value) { - return new Int32Value(value); - } - - /** - * Creates a new {@link BsonTimestamp} constructed using the given values. - * - * @param seconds The seconds value to use for the new BSON Timestamp. - * @param increment The increment value to use for the new BSON Timestamp. - * @return A new {@link BsonTimestamp} constructed using the given values. - */ - @NonNull - public static BsonTimestamp bsonTimestamp(long seconds, long increment) { - return new BsonTimestamp(seconds, increment); - } - - /** - * Creates a new {@link BsonObjectId} constructed using the given value. - * - * @param oid The 24-character hex string representation of the ObjectId. 
- * @return A new {@link BsonObjectId} constructed using the given value. - */ - @NonNull - public static BsonObjectId bsonObjectId(@NonNull String oid) { - return new BsonObjectId(oid); - } - - /** - * Creates a new {@link BsonBinaryData} constructed using the given values. - * - * @param subtype The subtype for the data. - * @param data The binary data as a byte array. - * @return A new {@link BsonBinaryData} constructed using the given values. - */ - @NonNull - public static BsonBinaryData bsonBinaryData(int subtype, @NonNull byte[] data) { - return BsonBinaryData.fromBytes(subtype, data); - } - - /** - * Returns a {@link MinKey} value. - * - * @return A {@link MinKey} object which is the same as all MinKey objects. - */ - @NonNull - public static MinKey minKey() { - return MinKey.instance(); - } - - /** - * Returns a {@link MaxKey} value. - * - * @return A {@link MaxKey} object which is the same as all MaxKey objects. - */ - @NonNull - public static MaxKey maxKey() { - return MaxKey.instance(); - } } diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataWriter.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataWriter.java index 805dee56ae0..2ad4af8d2c0 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataWriter.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataWriter.java @@ -110,9 +110,9 @@ public Object convertValue(Value value) { case TYPE_ORDER_REGEX: return convertRegex(value.getMapValue().getFieldsMap()); case TYPE_ORDER_MAX_KEY: - return FieldValue.maxKey(); + return MaxKey.instance(); case TYPE_ORDER_MIN_KEY: - return FieldValue.minKey(); + return MinKey.instance(); default: throw fail("Unknown value type: " + value.getValueTypeCase()); diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/BsonTypesTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/BsonTypesTest.java index b1df92b4082..34ae59306db 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/BsonTypesTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/BsonTypesTest.java @@ -30,7 +30,7 @@ public class BsonTypesTest { @Test public void testBsonObjectIdEquality() { BsonObjectId bsonObjectId = new BsonObjectId("507f191e810c19729de860ea"); - BsonObjectId bsonObjectIdDup = FieldValue.bsonObjectId("507f191e810c19729de860ea"); + BsonObjectId bsonObjectIdDup = new BsonObjectId("507f191e810c19729de860ea"); BsonObjectId differentObjectId = new BsonObjectId("507f191e810c19729de860eb"); assertEquals(bsonObjectId, bsonObjectIdDup); @@ -45,7 +45,7 @@ public void testBsonObjectIdEquality() { @Test public void testBsonTimeStampEquality() { BsonTimestamp bsonTimestamp = new BsonTimestamp(1, 2); - BsonTimestamp bsonTimestampDup = FieldValue.bsonTimestamp(1, 2); + BsonTimestamp bsonTimestampDup = new BsonTimestamp(1, 2); BsonTimestamp differentSecondsTimestamp = new BsonTimestamp(2, 2); BsonTimestamp differentIncrementTimestamp = new BsonTimestamp(1, 3); @@ -65,7 +65,7 @@ public void testBsonTimeStampEquality() { @Test public void testBsonBinaryDataEquality() { BsonBinaryData bsonBinaryData = BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}); - BsonBinaryData bsonBinaryDataDup = FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}); + BsonBinaryData bsonBinaryDataDup = BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}); BsonBinaryData differentSubtypeBinaryData = BsonBinaryData.fromBytes(2, new byte[] {1, 2, 3}); BsonBinaryData 
differentDataBinaryData = BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4}); @@ -85,7 +85,7 @@ public void testBsonBinaryDataEquality() { @Test public void testRegexEquality() { RegexValue regex = new RegexValue("^foo", "i"); - RegexValue regexDup = FieldValue.regex("^foo", "i"); + RegexValue regexDup = new RegexValue("^foo", "i"); RegexValue differentPatternRegex = new RegexValue("^bar", "i"); RegexValue differentOptionsRegex = new RegexValue("^foo", "m"); @@ -105,7 +105,7 @@ public void testRegexEquality() { @Test public void testInt32Equality() { Int32Value int32 = new Int32Value(1); - Int32Value int32Dup = FieldValue.int32(1); + Int32Value int32Dup = new Int32Value(1); Int32Value differentInt32 = new Int32Value(2); assertEquals(int32, int32Dup); @@ -119,7 +119,7 @@ public void testInt32Equality() { @Test public void testMaxKeyIsSingleton() { - MaxKey maxKey = FieldValue.maxKey(); + MaxKey maxKey = MaxKey.instance(); MaxKey maxKeyDup = MaxKey.instance(); assertEquals(maxKey, maxKeyDup); assertEquals(maxKey.hashCode(), maxKeyDup.hashCode()); @@ -127,7 +127,7 @@ public void testMaxKeyIsSingleton() { @Test public void testMinKeyIsSingleton() { - MinKey minKey = FieldValue.minKey(); + MinKey minKey = MinKey.instance(); MinKey minKeyDup = MinKey.instance(); assertEquals(minKey, minKeyDup); assertEquals(minKey.hashCode(), minKeyDup.hashCode()); @@ -135,8 +135,8 @@ public void testMinKeyIsSingleton() { @Test public void testMinKeyMaxKeyNullNotEqual() { - MinKey minKey = FieldValue.minKey(); - MaxKey maxKey = FieldValue.maxKey(); + MinKey minKey = MinKey.instance(); + MaxKey maxKey = MaxKey.instance(); assertNotEquals(minKey, maxKey); assertNotEquals(minKey, null); assertNotEquals(maxKey, null); diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/FieldValueTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/FieldValueTest.java index 58ed3a9f080..a8837e06b1c 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/FieldValueTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/FieldValueTest.java @@ -32,20 +32,20 @@ public void testEquals() { FieldValue deleteDup = FieldValue.delete(); FieldValue serverTimestamp = FieldValue.serverTimestamp(); FieldValue serverTimestampDup = FieldValue.serverTimestamp(); - RegexValue regex = FieldValue.regex("pattern", "options"); - RegexValue regexDup = FieldValue.regex("pattern", "options"); - Int32Value int32 = FieldValue.int32(1); - Int32Value int32Dup = FieldValue.int32(1); - BsonTimestamp bsonTimestamp = FieldValue.bsonTimestamp(1, 2); - BsonTimestamp bsonTimestampDup = FieldValue.bsonTimestamp(1, 2); - BsonObjectId bsonObjectId = FieldValue.bsonObjectId("507f191e810c19729de860ea"); - BsonObjectId bsonObjectIdDup = FieldValue.bsonObjectId("507f191e810c19729de860ea"); - BsonBinaryData bsonBinary = FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}); - BsonBinaryData bsonBinaryDup = FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}); - MinKey minKey = FieldValue.minKey(); - MinKey minKeyDup = FieldValue.minKey(); - MaxKey maxKey = FieldValue.maxKey(); - MaxKey maxKeyDup = FieldValue.maxKey(); + RegexValue regex = new RegexValue("pattern", "options"); + RegexValue regexDup = new RegexValue("pattern", "options"); + Int32Value int32 = new Int32Value(1); + Int32Value int32Dup = new Int32Value(1); + BsonTimestamp bsonTimestamp = new BsonTimestamp(1, 2); + BsonTimestamp bsonTimestampDup = new BsonTimestamp(1, 2); + BsonObjectId bsonObjectId = new 
BsonObjectId("507f191e810c19729de860ea"); + BsonObjectId bsonObjectIdDup = new BsonObjectId("507f191e810c19729de860ea"); + BsonBinaryData bsonBinary = BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}); + BsonBinaryData bsonBinaryDup = BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}); + MinKey minKey = MinKey.instance(); + MinKey minKeyDup = MinKey.instance(); + MaxKey maxKey = MaxKey.instance(); + MaxKey maxKeyDup = MaxKey.instance(); assertEquals(delete, deleteDup); assertEquals(serverTimestamp, serverTimestampDup); diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/UserDataWriterTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/UserDataWriterTest.java index 3175ed05b6a..e6a12ee3e95 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/UserDataWriterTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/UserDataWriterTest.java @@ -223,7 +223,7 @@ public void testConvertsGeoPointValue() { @Test public void testConvertsBsonObjectIdValue() { - List testCases = asList(new BsonObjectId("foo"), FieldValue.bsonObjectId("bar")); + List testCases = asList(new BsonObjectId("foo"), new BsonObjectId("bar")); for (BsonObjectId p : testCases) { Value value = wrap(p); Object convertedValue = convertValue(value); @@ -233,7 +233,7 @@ public void testConvertsBsonObjectIdValue() { @Test public void testConvertsBsonTimestampValue() { - List testCases = asList(new BsonTimestamp(1, 2), FieldValue.bsonTimestamp(3, 4)); + List testCases = asList(new BsonTimestamp(1, 2), new BsonTimestamp(3, 4)); for (BsonTimestamp p : testCases) { Value value = wrap(p); Object convertedValue = convertValue(value); @@ -247,7 +247,7 @@ public void testConvertsBsonBinaryValue() { asList( BsonBinaryData.fromBytes(1, new byte[] {1, 2}), BsonBinaryData.fromByteString(1, ByteString.EMPTY), - FieldValue.bsonBinaryData(1, new byte[] {1, 2})); + BsonBinaryData.fromBytes(1, new byte[] {1, 2})); for (BsonBinaryData p : testCases) { Value value = wrap(p); Object convertedValue = convertValue(value); @@ -257,7 +257,7 @@ public void testConvertsBsonBinaryValue() { @Test public void testConvertsRegexValue() { - List testCases = asList(new RegexValue("^foo", "i"), FieldValue.regex("^bar", "g")); + List testCases = asList(new RegexValue("^foo", "i"), new RegexValue("^bar", "g")); for (RegexValue p : testCases) { Value value = wrap(p); Object convertedValue = convertValue(value); @@ -268,7 +268,7 @@ public void testConvertsRegexValue() { @Test public void testConvertsInt32Value() { List testCases = - asList(new Int32Value(1), new Int32Value(-1), new Int32Value(0), FieldValue.int32(123)); + asList(new Int32Value(1), new Int32Value(-1), new Int32Value(0), new Int32Value(123)); for (Int32Value p : testCases) { Value value = wrap(p); Object convertedValue = convertValue(value); @@ -278,7 +278,7 @@ public void testConvertsInt32Value() { @Test public void testConvertsMinKey() { - List testCases = asList(FieldValue.minKey(), MinKey.instance()); + List testCases = asList(MinKey.instance(), MinKey.instance()); for (MinKey p : testCases) { Value value = wrap(p); Object convertedValue = convertValue(value); @@ -288,7 +288,7 @@ public void testConvertsMinKey() { @Test public void testConvertsMaxKey() { - List testCases = asList(FieldValue.maxKey(), MaxKey.instance()); + List testCases = asList(MaxKey.instance(), MaxKey.instance()); for (MaxKey p : testCases) { Value value = wrap(p); Object convertedValue = convertValue(value); diff --git 
a/firebase-firestore/src/test/java/com/google/firebase/firestore/core/QueryTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/core/QueryTest.java index 7ee54cda372..7f7f66e01fc 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/core/QueryTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/core/QueryTest.java @@ -34,8 +34,14 @@ import com.google.firebase.Timestamp; import com.google.firebase.firestore.Blob; -import com.google.firebase.firestore.FieldValue; +import com.google.firebase.firestore.BsonBinaryData; +import com.google.firebase.firestore.BsonObjectId; +import com.google.firebase.firestore.BsonTimestamp; import com.google.firebase.firestore.GeoPoint; +import com.google.firebase.firestore.Int32Value; +import com.google.firebase.firestore.MaxKey; +import com.google.firebase.firestore.MinKey; +import com.google.firebase.firestore.RegexValue; import com.google.firebase.firestore.model.DocumentKey; import com.google.firebase.firestore.model.MutableDocument; import com.google.firebase.firestore.model.ResourcePath; @@ -843,25 +849,25 @@ public void testCanonicalIdsAreStable() { // BSON types assertCanonicalId( - baseQuery.filter(filter("a", "<=", FieldValue.bsonObjectId("foo"))), + baseQuery.filter(filter("a", "<=", new BsonObjectId("foo"))), "collection|f:a<={__oid__:foo}|ob:aasc__name__asc"); assertCanonicalId( - baseQuery.filter(filter("a", "<=", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}))), + baseQuery.filter(filter("a", "<=", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))), "collection|f:a<={__binary__:01010203}|ob:aasc__name__asc"); assertCanonicalId( - baseQuery.filter(filter("a", "<=", FieldValue.bsonTimestamp(1, 2))), + baseQuery.filter(filter("a", "<=", new BsonTimestamp(1, 2))), "collection|f:a<={__request_timestamp__:{increment:2,seconds:1}}|ob:aasc__name__asc"); assertCanonicalId( - baseQuery.filter(filter("a", "<=", FieldValue.regex("^foo", "i"))), + baseQuery.filter(filter("a", "<=", new RegexValue("^foo", "i"))), "collection|f:a<={__regex__:{options:i,pattern:^foo}}|ob:aasc__name__asc"); assertCanonicalId( - baseQuery.filter(filter("a", "<=", FieldValue.int32(1))), + baseQuery.filter(filter("a", "<=", new Int32Value(1))), "collection|f:a<={__int__:1}|ob:aasc__name__asc"); assertCanonicalId( - baseQuery.filter(filter("a", "<=", FieldValue.minKey())), + baseQuery.filter(filter("a", "<=", MinKey.instance())), "collection|f:a<={__min__:null}|ob:aasc__name__asc"); assertCanonicalId( - baseQuery.filter(filter("a", "<=", FieldValue.maxKey())), + baseQuery.filter(filter("a", "<=", MaxKey.instance())), "collection|f:a<={__max__:null}|ob:aasc__name__asc"); } diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/index/FirestoreIndexValueWriterTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/index/FirestoreIndexValueWriterTest.java index a8cc48daaa2..6f1eea8cbbb 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/index/FirestoreIndexValueWriterTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/index/FirestoreIndexValueWriterTest.java @@ -106,7 +106,7 @@ public void writeIndexValueSupportsEmptyVector() { public void writeIndexValueSupportsBsonObjectId() throws ExecutionException, InterruptedException { UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); - Value value = dataReader.parseQueryValue(FieldValue.bsonObjectId("507f191e810c19729de860ea")); + Value value = 
dataReader.parseQueryValue(new BsonObjectId("507f191e810c19729de860ea")); // Encode an actual ObjectIdValue IndexByteEncoder encoder = new IndexByteEncoder(); @@ -131,7 +131,7 @@ public void writeIndexValueSupportsBsonObjectId() public void writeIndexValueSupportsBsonBinaryData() throws ExecutionException, InterruptedException { UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); - Value value = dataReader.parseQueryValue(FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3})); + Value value = dataReader.parseQueryValue(BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})); // Encode an actual BSONBinaryDataValue IndexByteEncoder encoder = new IndexByteEncoder(); @@ -157,7 +157,7 @@ public void writeIndexValueSupportsBsonBinaryData() public void writeIndexValueSupportsBsonBinaryWithEmptyData() throws ExecutionException, InterruptedException { UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); - Value value = dataReader.parseQueryValue(FieldValue.bsonBinaryData(1, new byte[] {})); + Value value = dataReader.parseQueryValue(BsonBinaryData.fromBytes(1, new byte[] {})); // Encode an actual BSONBinaryDataValue IndexByteEncoder encoder = new IndexByteEncoder(); @@ -183,7 +183,7 @@ public void writeIndexValueSupportsBsonBinaryWithEmptyData() public void writeIndexValueSupportsBsonTimestamp() throws ExecutionException, InterruptedException { UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); - Value value = dataReader.parseQueryValue(FieldValue.bsonTimestamp(1, 2)); + Value value = dataReader.parseQueryValue(new BsonTimestamp(1, 2)); // Encode an actual BSONTimestampValue IndexByteEncoder encoder = new IndexByteEncoder(); @@ -207,7 +207,7 @@ public void writeIndexValueSupportsBsonTimestamp() public void writeIndexValueSupportsLargestBsonTimestamp() throws ExecutionException, InterruptedException { UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); - Value value = dataReader.parseQueryValue(FieldValue.bsonTimestamp(4294967295L, 4294967295L)); + Value value = dataReader.parseQueryValue(new BsonTimestamp(4294967295L, 4294967295L)); // Encode an actual BSONTimestampValue IndexByteEncoder encoder = new IndexByteEncoder(); @@ -232,7 +232,7 @@ public void writeIndexValueSupportsLargestBsonTimestamp() public void writeIndexValueSupportsSmallestBsonTimestamp() throws ExecutionException, InterruptedException { UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); - Value value = dataReader.parseQueryValue(FieldValue.bsonTimestamp(0, 0)); + Value value = dataReader.parseQueryValue(new BsonTimestamp(0, 0)); // Encode an actual BSONTimestampValue IndexByteEncoder encoder = new IndexByteEncoder(); @@ -255,7 +255,7 @@ public void writeIndexValueSupportsSmallestBsonTimestamp() @Test public void writeIndexValueSupportsRegex() throws ExecutionException, InterruptedException { UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); - Value value = dataReader.parseQueryValue(FieldValue.regex("^foo", "i")); + Value value = dataReader.parseQueryValue(new RegexValue("^foo", "i")); IndexByteEncoder encoder = new IndexByteEncoder(); FirestoreIndexValueWriter.INSTANCE.writeIndexValue( value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); @@ -278,7 +278,7 @@ public void writeIndexValueSupportsRegex() throws ExecutionException, Interrupte @Test public void writeIndexValueSupportsInt32() throws ExecutionException, InterruptedException { UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); - Value value = 
dataReader.parseQueryValue(FieldValue.int32(1)); + Value value = dataReader.parseQueryValue(new Int32Value(1)); IndexByteEncoder encoder = new IndexByteEncoder(); FirestoreIndexValueWriter.INSTANCE.writeIndexValue( value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); @@ -300,7 +300,7 @@ public void writeIndexValueSupportsInt32() throws ExecutionException, Interrupte public void writeIndexValueSupportsLargestInt32() throws ExecutionException, InterruptedException { UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); - Value value = dataReader.parseQueryValue(FieldValue.int32(2147483647)); + Value value = dataReader.parseQueryValue(new Int32Value(2147483647)); IndexByteEncoder encoder = new IndexByteEncoder(); FirestoreIndexValueWriter.INSTANCE.writeIndexValue( value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); @@ -322,7 +322,7 @@ public void writeIndexValueSupportsLargestInt32() public void writeIndexValueSupportsSmallestInt32() throws ExecutionException, InterruptedException { UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); - Value value = dataReader.parseQueryValue(FieldValue.int32(-2147483648)); + Value value = dataReader.parseQueryValue(new Int32Value(-2147483648)); IndexByteEncoder encoder = new IndexByteEncoder(); FirestoreIndexValueWriter.INSTANCE.writeIndexValue( value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); @@ -343,7 +343,7 @@ public void writeIndexValueSupportsSmallestInt32() @Test public void writeIndexValueSupportsMinKey() throws ExecutionException, InterruptedException { UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); - Value value = dataReader.parseQueryValue(FieldValue.minKey()); + Value value = dataReader.parseQueryValue(MinKey.instance()); IndexByteEncoder encoder = new IndexByteEncoder(); FirestoreIndexValueWriter.INSTANCE.writeIndexValue( value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); @@ -363,7 +363,7 @@ public void writeIndexValueSupportsMinKey() throws ExecutionException, Interrupt @Test public void writeIndexValueSupportsMaxKey() throws ExecutionException, InterruptedException { UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); - Value value = dataReader.parseQueryValue(FieldValue.maxKey()); + Value value = dataReader.parseQueryValue(MaxKey.instance()); IndexByteEncoder encoder = new IndexByteEncoder(); FirestoreIndexValueWriter.INSTANCE.writeIndexValue( value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteIndexManagerTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteIndexManagerTest.java index 896c6edf6ff..fce0404c342 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteIndexManagerTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteIndexManagerTest.java @@ -39,8 +39,14 @@ import static org.junit.Assert.assertNull; import com.google.firebase.Timestamp; -import com.google.firebase.firestore.FieldValue; +import com.google.firebase.firestore.BsonBinaryData; +import com.google.firebase.firestore.BsonObjectId; +import com.google.firebase.firestore.BsonTimestamp; import com.google.firebase.firestore.GeoPoint; +import com.google.firebase.firestore.Int32Value; +import com.google.firebase.firestore.MaxKey; +import com.google.firebase.firestore.MinKey; +import com.google.firebase.firestore.RegexValue; import com.google.firebase.firestore.auth.User; import 
com.google.firebase.firestore.core.Filter; import com.google.firebase.firestore.core.Query; @@ -1243,51 +1249,35 @@ public void testIndexesBsonObjectId() { indexManager.addFieldIndex( fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); - addDoc("coll/doc1", map("key", FieldValue.bsonObjectId("507f191e810c19729de860ea"))); - addDoc("coll/doc2", map("key", FieldValue.bsonObjectId("507f191e810c19729de860eb"))); - addDoc("coll/doc3", map("key", FieldValue.bsonObjectId("507f191e810c19729de860ec"))); + addDoc("coll/doc1", map("key", new BsonObjectId("507f191e810c19729de860ea"))); + addDoc("coll/doc2", map("key", new BsonObjectId("507f191e810c19729de860eb"))); + addDoc("coll/doc3", map("key", new BsonObjectId("507f191e810c19729de860ec"))); Query query = query("coll").orderBy(orderBy("key", "asc")); verifyResults(query, "coll/doc1", "coll/doc2", "coll/doc3"); - query = - query("coll") - .filter(filter("key", "==", FieldValue.bsonObjectId("507f191e810c19729de860ea"))); + query = query("coll").filter(filter("key", "==", new BsonObjectId("507f191e810c19729de860ea"))); verifyResults(query, "coll/doc1"); - query = - query("coll") - .filter(filter("key", "!=", FieldValue.bsonObjectId("507f191e810c19729de860ea"))); + query = query("coll").filter(filter("key", "!=", new BsonObjectId("507f191e810c19729de860ea"))); verifyResults(query, "coll/doc2", "coll/doc3"); - query = - query("coll") - .filter(filter("key", ">=", FieldValue.bsonObjectId("507f191e810c19729de860eb"))); + query = query("coll").filter(filter("key", ">=", new BsonObjectId("507f191e810c19729de860eb"))); verifyResults(query, "coll/doc2", "coll/doc3"); - query = - query("coll") - .filter(filter("key", "<=", FieldValue.bsonObjectId("507f191e810c19729de860eb"))); + query = query("coll").filter(filter("key", "<=", new BsonObjectId("507f191e810c19729de860eb"))); verifyResults(query, "coll/doc1", "coll/doc2"); - query = - query("coll") - .filter(filter("key", ">", FieldValue.bsonObjectId("507f191e810c19729de860eb"))); + query = query("coll").filter(filter("key", ">", new BsonObjectId("507f191e810c19729de860eb"))); verifyResults(query, "coll/doc3"); - query = - query("coll") - .filter(filter("key", "<", FieldValue.bsonObjectId("507f191e810c19729de860eb"))); + query = query("coll").filter(filter("key", "<", new BsonObjectId("507f191e810c19729de860eb"))); verifyResults(query, "coll/doc1"); - query = - query("coll") - .filter(filter("key", ">", FieldValue.bsonObjectId("507f191e810c19729de860ec"))); + query = query("coll").filter(filter("key", ">", new BsonObjectId("507f191e810c19729de860ec"))); verifyResults(query); - query = - query("coll") - .filter(filter("key", "<", FieldValue.bsonObjectId("507f191e810c19729de860ea"))); + query = query("coll").filter(filter("key", "<", new BsonObjectId("507f191e810c19729de860ea"))); verifyResults(query); } @@ -1296,51 +1286,47 @@ public void testIndexesBsonBinary() { indexManager.addFieldIndex( fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); - addDoc("coll/doc1", map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}))); - addDoc("coll/doc2", map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 4}))); - addDoc("coll/doc3", map("key", FieldValue.bsonBinaryData(1, new byte[] {2, 1, 2}))); + addDoc("coll/doc1", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); + addDoc("coll/doc2", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4}))); + addDoc("coll/doc3", map("key", BsonBinaryData.fromBytes(1, new 
byte[] {2, 1, 2}))); Query query = query("coll").orderBy(orderBy("key", "asc")); verifyResults(query, "coll/doc1", "coll/doc2", "coll/doc3"); query = query("coll") - .filter(filter("key", "==", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}))); + .filter(filter("key", "==", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); verifyResults(query, "coll/doc1"); query = query("coll") - .filter(filter("key", "!=", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}))); + .filter(filter("key", "!=", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); verifyResults(query, "coll/doc2", "coll/doc3"); query = query("coll") - .filter(filter("key", ">=", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 4}))); + .filter(filter("key", ">=", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4}))); verifyResults(query, "coll/doc2", "coll/doc3"); query = query("coll") - .filter(filter("key", "<=", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 4}))); + .filter(filter("key", "<=", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4}))); verifyResults(query, "coll/doc1", "coll/doc2"); query = - query("coll") - .filter(filter("key", ">", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 4}))); + query("coll").filter(filter("key", ">", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4}))); verifyResults(query, "coll/doc3"); query = - query("coll") - .filter(filter("key", "<", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 4}))); + query("coll").filter(filter("key", "<", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4}))); verifyResults(query, "coll/doc1"); query = - query("coll") - .filter(filter("key", ">", FieldValue.bsonBinaryData(1, new byte[] {2, 1, 2}))); + query("coll").filter(filter("key", ">", BsonBinaryData.fromBytes(1, new byte[] {2, 1, 2}))); verifyResults(query); query = - query("coll") - .filter(filter("key", "<", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}))); + query("coll").filter(filter("key", "<", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); verifyResults(query); } @@ -1349,35 +1335,35 @@ public void testIndexesBsonTimestamp() { indexManager.addFieldIndex( fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); - addDoc("coll/doc1", map("key", FieldValue.bsonTimestamp(1, 1))); - addDoc("coll/doc2", map("key", FieldValue.bsonTimestamp(1, 2))); - addDoc("coll/doc3", map("key", FieldValue.bsonTimestamp(2, 1))); + addDoc("coll/doc1", map("key", new BsonTimestamp(1, 1))); + addDoc("coll/doc2", map("key", new BsonTimestamp(1, 2))); + addDoc("coll/doc3", map("key", new BsonTimestamp(2, 1))); Query query = query("coll").orderBy(orderBy("key", "asc")); verifyResults(query, "coll/doc1", "coll/doc2", "coll/doc3"); - query = query("coll").filter(filter("key", "==", FieldValue.bsonTimestamp(1, 1))); + query = query("coll").filter(filter("key", "==", new BsonTimestamp(1, 1))); verifyResults(query, "coll/doc1"); - query = query("coll").filter(filter("key", "!=", FieldValue.bsonTimestamp(1, 1))); + query = query("coll").filter(filter("key", "!=", new BsonTimestamp(1, 1))); verifyResults(query, "coll/doc2", "coll/doc3"); - query = query("coll").filter(filter("key", ">=", FieldValue.bsonTimestamp(1, 2))); + query = query("coll").filter(filter("key", ">=", new BsonTimestamp(1, 2))); verifyResults(query, "coll/doc2", "coll/doc3"); - query = query("coll").filter(filter("key", "<=", FieldValue.bsonTimestamp(1, 2))); + query = query("coll").filter(filter("key", "<=", new BsonTimestamp(1, 2))); verifyResults(query, "coll/doc1", "coll/doc2"); - query = 
query("coll").filter(filter("key", ">", FieldValue.bsonTimestamp(1, 2))); + query = query("coll").filter(filter("key", ">", new BsonTimestamp(1, 2))); verifyResults(query, "coll/doc3"); - query = query("coll").filter(filter("key", "<", FieldValue.bsonTimestamp(1, 2))); + query = query("coll").filter(filter("key", "<", new BsonTimestamp(1, 2))); verifyResults(query, "coll/doc1"); - query = query("coll").filter(filter("key", ">", FieldValue.bsonTimestamp(2, 1))); + query = query("coll").filter(filter("key", ">", new BsonTimestamp(2, 1))); verifyResults(query); - query = query("coll").filter(filter("key", "<", FieldValue.bsonTimestamp(1, 1))); + query = query("coll").filter(filter("key", "<", new BsonTimestamp(1, 1))); verifyResults(query); } @@ -1386,35 +1372,35 @@ public void testIndexesRegex() { indexManager.addFieldIndex( fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); - addDoc("coll/doc1", map("key", FieldValue.regex("a", "i"))); - addDoc("coll/doc2", map("key", FieldValue.regex("a", "m"))); - addDoc("coll/doc3", map("key", FieldValue.regex("b", "i"))); + addDoc("coll/doc1", map("key", new RegexValue("a", "i"))); + addDoc("coll/doc2", map("key", new RegexValue("a", "m"))); + addDoc("coll/doc3", map("key", new RegexValue("b", "i"))); Query query = query("coll").orderBy(orderBy("key", "asc")); verifyResults(query, "coll/doc1", "coll/doc2", "coll/doc3"); - query = query("coll").filter(filter("key", "==", FieldValue.regex("a", "i"))); + query = query("coll").filter(filter("key", "==", new RegexValue("a", "i"))); verifyResults(query, "coll/doc1"); - query = query("coll").filter(filter("key", "!=", FieldValue.regex("a", "i"))); + query = query("coll").filter(filter("key", "!=", new RegexValue("a", "i"))); verifyResults(query, "coll/doc2", "coll/doc3"); - query = query("coll").filter(filter("key", ">=", FieldValue.regex("a", "m"))); + query = query("coll").filter(filter("key", ">=", new RegexValue("a", "m"))); verifyResults(query, "coll/doc2", "coll/doc3"); - query = query("coll").filter(filter("key", "<=", FieldValue.regex("a", "m"))); + query = query("coll").filter(filter("key", "<=", new RegexValue("a", "m"))); verifyResults(query, "coll/doc1", "coll/doc2"); - query = query("coll").filter(filter("key", ">", FieldValue.regex("a", "m"))); + query = query("coll").filter(filter("key", ">", new RegexValue("a", "m"))); verifyResults(query, "coll/doc3"); - query = query("coll").filter(filter("key", "<", FieldValue.regex("a", "m"))); + query = query("coll").filter(filter("key", "<", new RegexValue("a", "m"))); verifyResults(query, "coll/doc1"); - query = query("coll").filter(filter("key", ">", FieldValue.regex("b", "i"))); + query = query("coll").filter(filter("key", ">", new RegexValue("b", "i"))); verifyResults(query); - query = query("coll").filter(filter("key", "<", FieldValue.regex("a", "i"))); + query = query("coll").filter(filter("key", "<", new RegexValue("a", "i"))); verifyResults(query); } @@ -1423,35 +1409,35 @@ public void testIndexesInt32() { indexManager.addFieldIndex( fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); - addDoc("coll/doc1", map("key", FieldValue.int32(1))); - addDoc("coll/doc2", map("key", FieldValue.int32(2))); - addDoc("coll/doc3", map("key", FieldValue.int32(3))); + addDoc("coll/doc1", map("key", new Int32Value(1))); + addDoc("coll/doc2", map("key", new Int32Value(2))); + addDoc("coll/doc3", map("key", new Int32Value(3))); Query query = query("coll").orderBy(orderBy("key", "asc")); 
verifyResults(query, "coll/doc1", "coll/doc2", "coll/doc3"); - query = query("coll").filter(filter("key", "==", FieldValue.int32(1))); + query = query("coll").filter(filter("key", "==", new Int32Value(1))); verifyResults(query, "coll/doc1"); - query = query("coll").filter(filter("key", "!=", FieldValue.int32(1))); + query = query("coll").filter(filter("key", "!=", new Int32Value(1))); verifyResults(query, "coll/doc2", "coll/doc3"); - query = query("coll").filter(filter("key", ">=", FieldValue.int32(2))); + query = query("coll").filter(filter("key", ">=", new Int32Value(2))); verifyResults(query, "coll/doc2", "coll/doc3"); - query = query("coll").filter(filter("key", "<=", FieldValue.int32(2))); + query = query("coll").filter(filter("key", "<=", new Int32Value(2))); verifyResults(query, "coll/doc1", "coll/doc2"); - query = query("coll").filter(filter("key", ">", FieldValue.int32(2))); + query = query("coll").filter(filter("key", ">", new Int32Value(2))); verifyResults(query, "coll/doc3"); - query = query("coll").filter(filter("key", "<", FieldValue.int32(2))); + query = query("coll").filter(filter("key", "<", new Int32Value(2))); verifyResults(query, "coll/doc1"); - query = query("coll").filter(filter("key", ">", FieldValue.int32(3))); + query = query("coll").filter(filter("key", ">", new Int32Value(3))); verifyResults(query); - query = query("coll").filter(filter("key", "<", FieldValue.int32(1))); + query = query("coll").filter(filter("key", "<", new Int32Value(1))); verifyResults(query); } @@ -1459,31 +1445,31 @@ public void testIndexesInt32() { public void testIndexesMinKey() { indexManager.addFieldIndex( fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); - addDoc("coll/doc1", map("key", FieldValue.minKey())); - addDoc("coll/doc2", map("key", FieldValue.minKey())); + addDoc("coll/doc1", map("key", MinKey.instance())); + addDoc("coll/doc2", map("key", MinKey.instance())); addDoc("coll/doc3", map("key", null)); addDoc("coll/doc4", map("key", 1)); - addDoc("coll/doc5", map("key", FieldValue.maxKey())); + addDoc("coll/doc5", map("key", MaxKey.instance())); Query query = query("coll").orderBy(orderBy("key", "asc")); verifyResults(query, "coll/doc3", "coll/doc1", "coll/doc2", "coll/doc4", "coll/doc5"); - query = query("coll").filter(filter("key", "==", FieldValue.minKey())); + query = query("coll").filter(filter("key", "==", MinKey.instance())); verifyResults(query, "coll/doc1", "coll/doc2"); - query = query("coll").filter(filter("key", "!=", FieldValue.minKey())); + query = query("coll").filter(filter("key", "!=", MinKey.instance())); verifyResults(query, "coll/doc4", "coll/doc5"); - query = query("coll").filter(filter("key", ">=", FieldValue.minKey())); + query = query("coll").filter(filter("key", ">=", MinKey.instance())); verifyResults(query, "coll/doc1", "coll/doc2"); - query = query("coll").filter(filter("key", "<=", FieldValue.minKey())); + query = query("coll").filter(filter("key", "<=", MinKey.instance())); verifyResults(query, "coll/doc1", "coll/doc2"); - query = query("coll").filter(filter("key", ">", FieldValue.minKey())); + query = query("coll").filter(filter("key", ">", MinKey.instance())); verifyResults(query); - query = query("coll").filter(filter("key", "<", FieldValue.minKey())); + query = query("coll").filter(filter("key", "<", MinKey.instance())); verifyResults(query); } @@ -1491,31 +1477,31 @@ public void testIndexesMinKey() { public void testIndexesMaxKey() { indexManager.addFieldIndex( fieldIndex("coll", 0, 
FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); - addDoc("coll/doc1", map("key", FieldValue.minKey())); + addDoc("coll/doc1", map("key", MinKey.instance())); addDoc("coll/doc2", map("key", 1)); - addDoc("coll/doc3", map("key", FieldValue.maxKey())); - addDoc("coll/doc4", map("key", FieldValue.maxKey())); + addDoc("coll/doc3", map("key", MaxKey.instance())); + addDoc("coll/doc4", map("key", MaxKey.instance())); addDoc("coll/doc5", map("key", null)); Query query = query("coll").orderBy(orderBy("key", "asc")); verifyResults(query, "coll/doc5", "coll/doc1", "coll/doc2", "coll/doc3", "coll/doc4"); - query = query("coll").filter(filter("key", "==", FieldValue.maxKey())); + query = query("coll").filter(filter("key", "==", MaxKey.instance())); verifyResults(query, "coll/doc3", "coll/doc4"); - query = query("coll").filter(filter("key", "!=", FieldValue.maxKey())); + query = query("coll").filter(filter("key", "!=", MaxKey.instance())); verifyResults(query, "coll/doc1", "coll/doc2"); - query = query("coll").filter(filter("key", ">=", FieldValue.maxKey())); + query = query("coll").filter(filter("key", ">=", MaxKey.instance())); verifyResults(query, "coll/doc3", "coll/doc4"); - query = query("coll").filter(filter("key", "<=", FieldValue.maxKey())); + query = query("coll").filter(filter("key", "<=", MaxKey.instance())); verifyResults(query, "coll/doc3", "coll/doc4"); - query = query("coll").filter(filter("key", ">", FieldValue.maxKey())); + query = query("coll").filter(filter("key", ">", MaxKey.instance())); verifyResults(query); - query = query("coll").filter(filter("key", "<", FieldValue.maxKey())); + query = query("coll").filter(filter("key", "<", MaxKey.instance())); verifyResults(query); } @@ -1523,18 +1509,18 @@ public void testIndexesMaxKey() { public void testIndexFieldsOfBsonTypesTogether() { indexManager.addFieldIndex(fieldIndex("coll", "key", Kind.DESCENDING)); - addDoc("coll/doc1", map("key", FieldValue.minKey())); - addDoc("coll/doc2", map("key", FieldValue.int32(2))); - addDoc("coll/doc3", map("key", FieldValue.int32(1))); - addDoc("coll/doc4", map("key", FieldValue.bsonTimestamp(1, 2))); - addDoc("coll/doc5", map("key", FieldValue.bsonTimestamp(1, 1))); - addDoc("coll/doc6", map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 4}))); - addDoc("coll/doc7", map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}))); - addDoc("coll/doc8", map("key", FieldValue.bsonObjectId("507f191e810c19729de860eb"))); - addDoc("coll/doc9", map("key", FieldValue.bsonObjectId("507f191e810c19729de860ea"))); - addDoc("coll/doc10", map("key", FieldValue.regex("a", "m"))); - addDoc("coll/doc11", map("key", FieldValue.regex("a", "i"))); - addDoc("coll/doc12", map("key", FieldValue.maxKey())); + addDoc("coll/doc1", map("key", MinKey.instance())); + addDoc("coll/doc2", map("key", new Int32Value(2))); + addDoc("coll/doc3", map("key", new Int32Value(1))); + addDoc("coll/doc4", map("key", new BsonTimestamp(1, 2))); + addDoc("coll/doc5", map("key", new BsonTimestamp(1, 1))); + addDoc("coll/doc6", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4}))); + addDoc("coll/doc7", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); + addDoc("coll/doc8", map("key", new BsonObjectId("507f191e810c19729de860eb"))); + addDoc("coll/doc9", map("key", new BsonObjectId("507f191e810c19729de860ea"))); + addDoc("coll/doc10", map("key", new RegexValue("a", "m"))); + addDoc("coll/doc11", map("key", new RegexValue("a", "i"))); + addDoc("coll/doc12", map("key", MaxKey.instance())); Query 
query = query("coll").orderBy(orderBy("key", "desc")); verifyResults( @@ -1559,25 +1545,25 @@ public void testIndexFieldsOfAllTypesTogether() { indexManager.addFieldIndex(fieldIndex("coll", "key", Kind.DESCENDING)); addDoc("coll/a", map("key", null)); - addDoc("coll/b", map("key", FieldValue.minKey())); + addDoc("coll/b", map("key", MinKey.instance())); addDoc("coll/c", map("key", true)); addDoc("coll/d", map("key", Double.NaN)); - addDoc("coll/e", map("key", FieldValue.int32(1))); + addDoc("coll/e", map("key", new Int32Value(1))); addDoc("coll/f", map("key", 2.0)); addDoc("coll/g", map("key", 3L)); addDoc("coll/h", map("key", new Timestamp(100, 123456000))); - addDoc("coll/i", map("key", FieldValue.bsonTimestamp(1, 2))); + addDoc("coll/i", map("key", new BsonTimestamp(1, 2))); addDoc("coll/j", map("key", "string")); addDoc("coll/k", map("key", blob(1, 2, 3))); - addDoc("coll/l", map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}))); + addDoc("coll/l", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); addDoc("coll/m", map("key", ref("foo/bar"))); - addDoc("coll/n", map("key", FieldValue.bsonObjectId("507f191e810c19729de860ea"))); + addDoc("coll/n", map("key", new BsonObjectId("507f191e810c19729de860ea"))); addDoc("coll/o", map("key", new GeoPoint(0, 1))); - addDoc("coll/p", map("key", FieldValue.regex("^foo", "i"))); + addDoc("coll/p", map("key", new RegexValue("^foo", "i"))); addDoc("coll/q", map("key", Arrays.asList(1, 2))); // Note: Vector type not available in Java SDK, skipping 'r' addDoc("coll/s", map("key", map("a", 1))); - addDoc("coll/t", map("key", FieldValue.maxKey())); + addDoc("coll/t", map("key", MaxKey.instance())); Query query = query("coll").orderBy(orderBy("key", "desc")); verifyResults( diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteLocalStoreTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteLocalStoreTest.java index 0c78edaca84..1edbef0474c 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteLocalStoreTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteLocalStoreTest.java @@ -36,8 +36,15 @@ import static java.util.Collections.singletonList; import com.google.firebase.Timestamp; +import com.google.firebase.firestore.BsonBinaryData; +import com.google.firebase.firestore.BsonObjectId; +import com.google.firebase.firestore.BsonTimestamp; import com.google.firebase.firestore.FieldValue; import com.google.firebase.firestore.GeoPoint; +import com.google.firebase.firestore.Int32Value; +import com.google.firebase.firestore.MaxKey; +import com.google.firebase.firestore.MinKey; +import com.google.firebase.firestore.RegexValue; import com.google.firebase.firestore.core.Query; import com.google.firebase.firestore.model.DocumentKey; import com.google.firebase.firestore.model.FieldIndex; @@ -377,11 +384,11 @@ public void testIndexesBsonObjectId() { configureFieldIndexes(singletonList(index)); writeMutation( - setMutation("coll/doc1", map("key", FieldValue.bsonObjectId("507f191e810c19729de860ea")))); + setMutation("coll/doc1", map("key", new BsonObjectId("507f191e810c19729de860ea")))); writeMutation( - setMutation("coll/doc2", map("key", FieldValue.bsonObjectId("507f191e810c19729de860eb")))); + setMutation("coll/doc2", map("key", new BsonObjectId("507f191e810c19729de860eb")))); writeMutation( - setMutation("coll/doc3", map("key", FieldValue.bsonObjectId("507f191e810c19729de860ec")))); + setMutation("coll/doc3", 
map("key", new BsonObjectId("507f191e810c19729de860ec")))); backfillIndexes(); @@ -398,17 +405,13 @@ public void testIndexesBsonObjectId() { CountingQueryEngine.OverlayType.Set)); assertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3"); - query = - query("coll") - .filter(filter("key", "==", FieldValue.bsonObjectId("507f191e810c19729de860ea"))); + query = query("coll").filter(filter("key", "==", new BsonObjectId("507f191e810c19729de860ea"))); executeQuery(query); assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); assertOverlayTypes(keyMap("coll/doc1", CountingQueryEngine.OverlayType.Set)); assertQueryReturned("coll/doc1"); - query = - query("coll") - .filter(filter("key", "!=", FieldValue.bsonObjectId("507f191e810c19729de860ea"))); + query = query("coll").filter(filter("key", "!=", new BsonObjectId("507f191e810c19729de860ea"))); executeQuery(query); assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); assertOverlayTypes( @@ -419,9 +422,7 @@ public void testIndexesBsonObjectId() { CountingQueryEngine.OverlayType.Set)); assertQueryReturned("coll/doc2", "coll/doc3"); - query = - query("coll") - .filter(filter("key", ">=", FieldValue.bsonObjectId("507f191e810c19729de860eb"))); + query = query("coll").filter(filter("key", ">=", new BsonObjectId("507f191e810c19729de860eb"))); executeQuery(query); assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); assertOverlayTypes( @@ -432,9 +433,7 @@ public void testIndexesBsonObjectId() { CountingQueryEngine.OverlayType.Set)); assertQueryReturned("coll/doc2", "coll/doc3"); - query = - query("coll") - .filter(filter("key", "<=", FieldValue.bsonObjectId("507f191e810c19729de860eb"))); + query = query("coll").filter(filter("key", "<=", new BsonObjectId("507f191e810c19729de860eb"))); executeQuery(query); assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); assertOverlayTypes( @@ -445,17 +444,13 @@ public void testIndexesBsonObjectId() { CountingQueryEngine.OverlayType.Set)); assertQueryReturned("coll/doc1", "coll/doc2"); - query = - query("coll") - .filter(filter("key", ">", FieldValue.bsonObjectId("507f191e810c19729de860ec"))); + query = query("coll").filter(filter("key", ">", new BsonObjectId("507f191e810c19729de860ec"))); executeQuery(query); assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); assertOverlayTypes(keyMap()); assertQueryReturned(); - query = - query("coll") - .filter(filter("key", "<", FieldValue.bsonObjectId("507f191e810c19729de860ea"))); + query = query("coll").filter(filter("key", "<", new BsonObjectId("507f191e810c19729de860ea"))); executeQuery(query); assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); assertOverlayTypes(keyMap()); @@ -468,8 +463,8 @@ public void testIndexesBsonObjectId() { "key", "in", Arrays.asList( - FieldValue.bsonObjectId("507f191e810c19729de860ea"), - FieldValue.bsonObjectId("507f191e810c19729de860eb")))); + new BsonObjectId("507f191e810c19729de860ea"), + new BsonObjectId("507f191e810c19729de860eb")))); executeQuery(query); assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); assertOverlayTypes( @@ -487,9 +482,9 @@ public void testIndexesBsonTimestamp() { fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); configureFieldIndexes(singletonList(index)); - writeMutation(setMutation("coll/doc1", map("key", FieldValue.bsonTimestamp(1000, 1000)))); - writeMutation(setMutation("coll/doc2", map("key", FieldValue.bsonTimestamp(1001, 1000)))); - writeMutation(setMutation("coll/doc3", map("key", FieldValue.bsonTimestamp(1000, 1001)))); + 
writeMutation(setMutation("coll/doc1", map("key", new BsonTimestamp(1000, 1000)))); + writeMutation(setMutation("coll/doc2", map("key", new BsonTimestamp(1001, 1000)))); + writeMutation(setMutation("coll/doc3", map("key", new BsonTimestamp(1000, 1001)))); backfillIndexes(); @@ -506,13 +501,13 @@ public void testIndexesBsonTimestamp() { CountingQueryEngine.OverlayType.Set)); assertQueryReturned("coll/doc1", "coll/doc3", "coll/doc2"); - query = query("coll").filter(filter("key", "==", FieldValue.bsonTimestamp(1000, 1000))); + query = query("coll").filter(filter("key", "==", new BsonTimestamp(1000, 1000))); executeQuery(query); assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); assertOverlayTypes(keyMap("coll/doc1", CountingQueryEngine.OverlayType.Set)); assertQueryReturned("coll/doc1"); - query = query("coll").filter(filter("key", "!=", FieldValue.bsonTimestamp(1000, 1000))); + query = query("coll").filter(filter("key", "!=", new BsonTimestamp(1000, 1000))); executeQuery(query); assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); assertOverlayTypes( @@ -523,7 +518,7 @@ public void testIndexesBsonTimestamp() { CountingQueryEngine.OverlayType.Set)); assertQueryReturned("coll/doc3", "coll/doc2"); - query = query("coll").filter(filter("key", ">=", FieldValue.bsonTimestamp(1000, 1001))); + query = query("coll").filter(filter("key", ">=", new BsonTimestamp(1000, 1001))); executeQuery(query); assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); assertOverlayTypes( @@ -534,7 +529,7 @@ public void testIndexesBsonTimestamp() { CountingQueryEngine.OverlayType.Set)); assertQueryReturned("coll/doc3", "coll/doc2"); - query = query("coll").filter(filter("key", "<=", FieldValue.bsonTimestamp(1000, 1001))); + query = query("coll").filter(filter("key", "<=", new BsonTimestamp(1000, 1001))); executeQuery(query); assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); assertOverlayTypes( @@ -545,13 +540,13 @@ public void testIndexesBsonTimestamp() { CountingQueryEngine.OverlayType.Set)); assertQueryReturned("coll/doc1", "coll/doc3"); - query = query("coll").filter(filter("key", ">", FieldValue.bsonTimestamp(1001, 1000))); + query = query("coll").filter(filter("key", ">", new BsonTimestamp(1001, 1000))); executeQuery(query); assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); assertOverlayTypes(keyMap()); assertQueryReturned(); - query = query("coll").filter(filter("key", "<", FieldValue.bsonTimestamp(1000, 1000))); + query = query("coll").filter(filter("key", "<", new BsonTimestamp(1000, 1000))); executeQuery(query); assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); assertOverlayTypes(keyMap()); @@ -563,9 +558,7 @@ public void testIndexesBsonTimestamp() { filter( "key", "in", - Arrays.asList( - FieldValue.bsonTimestamp(1000, 1000), - FieldValue.bsonTimestamp(1000, 1001)))); + Arrays.asList(new BsonTimestamp(1000, 1000), new BsonTimestamp(1000, 1001)))); executeQuery(query); assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); assertOverlayTypes( @@ -584,13 +577,13 @@ public void testIndexesBsonBinary() { configureFieldIndexes(singletonList(index)); writeMutation( - setMutation("coll/doc1", map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3})))); + setMutation("coll/doc1", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})))); writeMutation( - setMutation("coll/doc2", map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2})))); + setMutation("coll/doc2", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2})))); writeMutation( - 
setMutation("coll/doc3", map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 4})))); + setMutation("coll/doc3", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4})))); writeMutation( - setMutation("coll/doc4", map("key", FieldValue.bsonBinaryData(2, new byte[] {1, 2})))); + setMutation("coll/doc4", map("key", BsonBinaryData.fromBytes(2, new byte[] {1, 2})))); backfillIndexes(); @@ -611,7 +604,7 @@ public void testIndexesBsonBinary() { query = query("coll") - .filter(filter("key", "==", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}))); + .filter(filter("key", "==", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); executeQuery(query); assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); assertOverlayTypes(keyMap("coll/doc1", CountingQueryEngine.OverlayType.Set)); @@ -619,7 +612,7 @@ public void testIndexesBsonBinary() { query = query("coll") - .filter(filter("key", "!=", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}))); + .filter(filter("key", "!=", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); executeQuery(query); assertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); assertOverlayTypes( @@ -634,7 +627,7 @@ public void testIndexesBsonBinary() { query = query("coll") - .filter(filter("key", ">=", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}))); + .filter(filter("key", ">=", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); executeQuery(query); assertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); assertOverlayTypes( @@ -649,7 +642,7 @@ public void testIndexesBsonBinary() { query = query("coll") - .filter(filter("key", "<=", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}))); + .filter(filter("key", "<=", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); executeQuery(query); assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); assertOverlayTypes( @@ -661,14 +654,14 @@ public void testIndexesBsonBinary() { assertQueryReturned("coll/doc2", "coll/doc1"); query = - query("coll").filter(filter("key", ">", FieldValue.bsonBinaryData(2, new byte[] {1, 2}))); + query("coll").filter(filter("key", ">", BsonBinaryData.fromBytes(2, new byte[] {1, 2}))); executeQuery(query); assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); assertOverlayTypes(keyMap()); assertQueryReturned(); query = - query("coll").filter(filter("key", "<", FieldValue.bsonBinaryData(1, new byte[] {1, 2}))); + query("coll").filter(filter("key", "<", BsonBinaryData.fromBytes(1, new byte[] {1, 2}))); executeQuery(query); assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); assertOverlayTypes(keyMap()); @@ -681,8 +674,8 @@ public void testIndexesBsonBinary() { "key", "in", Arrays.asList( - FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3}), - FieldValue.bsonBinaryData(1, new byte[] {1, 2})))); + BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}), + BsonBinaryData.fromBytes(1, new byte[] {1, 2})))); executeQuery(query); assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); assertOverlayTypes( @@ -700,9 +693,9 @@ public void testIndexesRegex() { fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); configureFieldIndexes(singletonList(index)); - writeMutation(setMutation("coll/doc1", map("key", FieldValue.regex("^bar", "i")))); - writeMutation(setMutation("coll/doc2", map("key", FieldValue.regex("^bar", "m")))); - writeMutation(setMutation("coll/doc3", map("key", FieldValue.regex("^foo", "i")))); + writeMutation(setMutation("coll/doc1", map("key", new RegexValue("^bar", "i")))); + writeMutation(setMutation("coll/doc2", 
map("key", new RegexValue("^bar", "m")))); + writeMutation(setMutation("coll/doc3", map("key", new RegexValue("^foo", "i")))); backfillIndexes(); @@ -719,13 +712,13 @@ public void testIndexesRegex() { CountingQueryEngine.OverlayType.Set)); assertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3"); - query = query("coll").filter(filter("key", "==", FieldValue.regex("^bar", "i"))); + query = query("coll").filter(filter("key", "==", new RegexValue("^bar", "i"))); executeQuery(query); assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); assertOverlayTypes(keyMap("coll/doc1", CountingQueryEngine.OverlayType.Set)); assertQueryReturned("coll/doc1"); - query = query("coll").filter(filter("key", "!=", FieldValue.regex("^bar", "i"))); + query = query("coll").filter(filter("key", "!=", new RegexValue("^bar", "i"))); executeQuery(query); assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); assertOverlayTypes( @@ -736,13 +729,13 @@ public void testIndexesRegex() { CountingQueryEngine.OverlayType.Set)); assertQueryReturned("coll/doc2", "coll/doc3"); - query = query("coll").filter(filter("key", ">", FieldValue.regex("^foo", "i"))); + query = query("coll").filter(filter("key", ">", new RegexValue("^foo", "i"))); executeQuery(query); assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); assertOverlayTypes(keyMap()); assertQueryReturned(); - query = query("coll").filter(filter("key", "<", FieldValue.regex("^bar", "i"))); + query = query("coll").filter(filter("key", "<", new RegexValue("^bar", "i"))); executeQuery(query); assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); assertOverlayTypes(keyMap()); @@ -754,7 +747,7 @@ public void testIndexesRegex() { filter( "key", "in", - Arrays.asList(FieldValue.regex("^bar", "i"), FieldValue.regex("^foo", "i")))); + Arrays.asList(new RegexValue("^bar", "i"), new RegexValue("^foo", "i")))); executeQuery(query); assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); assertOverlayTypes( @@ -771,9 +764,9 @@ public void testIndexesInt32() { FieldIndex index = fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); configureFieldIndexes(singletonList(index)); - writeMutation(setMutation("coll/doc1", map("key", FieldValue.int32(-1)))); - writeMutation(setMutation("coll/doc2", map("key", FieldValue.int32(0)))); - writeMutation(setMutation("coll/doc3", map("key", FieldValue.int32(1)))); + writeMutation(setMutation("coll/doc1", map("key", new Int32Value(-1)))); + writeMutation(setMutation("coll/doc2", map("key", new Int32Value(0)))); + writeMutation(setMutation("coll/doc3", map("key", new Int32Value(1)))); backfillIndexes(); @@ -790,13 +783,13 @@ public void testIndexesInt32() { CountingQueryEngine.OverlayType.Set)); assertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3"); - query = query("coll").filter(filter("key", "==", FieldValue.int32(-1))); + query = query("coll").filter(filter("key", "==", new Int32Value(-1))); executeQuery(query); assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); assertOverlayTypes(keyMap("coll/doc1", CountingQueryEngine.OverlayType.Set)); assertQueryReturned("coll/doc1"); - query = query("coll").filter(filter("key", "!=", FieldValue.int32(-1))); + query = query("coll").filter(filter("key", "!=", new Int32Value(-1))); executeQuery(query); assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); assertOverlayTypes( @@ -807,7 +800,7 @@ public void testIndexesInt32() { CountingQueryEngine.OverlayType.Set)); assertQueryReturned("coll/doc2", "coll/doc3"); - query = 
query("coll").filter(filter("key", ">=", FieldValue.int32(0))); + query = query("coll").filter(filter("key", ">=", new Int32Value(0))); executeQuery(query); assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); assertOverlayTypes( @@ -818,7 +811,7 @@ public void testIndexesInt32() { CountingQueryEngine.OverlayType.Set)); assertQueryReturned("coll/doc2", "coll/doc3"); - query = query("coll").filter(filter("key", "<=", FieldValue.int32(0))); + query = query("coll").filter(filter("key", "<=", new Int32Value(0))); executeQuery(query); assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); assertOverlayTypes( @@ -829,13 +822,13 @@ public void testIndexesInt32() { CountingQueryEngine.OverlayType.Set)); assertQueryReturned("coll/doc1", "coll/doc2"); - query = query("coll").filter(filter("key", ">", FieldValue.int32(1))); + query = query("coll").filter(filter("key", ">", new Int32Value(1))); executeQuery(query); assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); assertOverlayTypes(keyMap()); assertQueryReturned(); - query = query("coll").filter(filter("key", "<", FieldValue.int32(-1))); + query = query("coll").filter(filter("key", "<", new Int32Value(-1))); executeQuery(query); assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); assertOverlayTypes(keyMap()); @@ -843,7 +836,7 @@ public void testIndexesInt32() { query = query("coll") - .filter(filter("key", "in", Arrays.asList(FieldValue.int32(-1), FieldValue.int32(0)))); + .filter(filter("key", "in", Arrays.asList(new Int32Value(-1), new Int32Value(0)))); executeQuery(query); assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); assertOverlayTypes( @@ -862,10 +855,10 @@ public void testIndexesMinKey() { configureFieldIndexes(singletonList(index)); writeMutation(setMutation("coll/doc1", map("key", null))); - writeMutation(setMutation("coll/doc2", map("key", FieldValue.minKey()))); - writeMutation(setMutation("coll/doc3", map("key", FieldValue.minKey()))); + writeMutation(setMutation("coll/doc2", map("key", MinKey.instance()))); + writeMutation(setMutation("coll/doc3", map("key", MinKey.instance()))); writeMutation(setMutation("coll/doc4", map("key", 1))); - writeMutation(setMutation("coll/doc5", map("key", FieldValue.maxKey()))); + writeMutation(setMutation("coll/doc5", map("key", MaxKey.instance()))); backfillIndexes(); @@ -886,7 +879,7 @@ public void testIndexesMinKey() { CountingQueryEngine.OverlayType.Set)); assertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3", "coll/doc4", "coll/doc5"); - query = query("coll").filter(filter("key", "==", FieldValue.minKey())); + query = query("coll").filter(filter("key", "==", MinKey.instance())); executeQuery(query); assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); assertOverlayTypes( @@ -897,7 +890,7 @@ public void testIndexesMinKey() { CountingQueryEngine.OverlayType.Set)); assertQueryReturned("coll/doc2", "coll/doc3"); - query = query("coll").filter(filter("key", "!=", FieldValue.minKey())); + query = query("coll").filter(filter("key", "!=", MinKey.instance())); executeQuery(query); assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); assertOverlayTypes( @@ -908,7 +901,7 @@ public void testIndexesMinKey() { CountingQueryEngine.OverlayType.Set)); assertQueryReturned("coll/doc4", "coll/doc5"); - query = query("coll").filter(filter("key", ">=", FieldValue.minKey())); + query = query("coll").filter(filter("key", ">=", MinKey.instance())); executeQuery(query); assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); assertOverlayTypes( @@ -919,7 +912,7 @@ public 
void testIndexesMinKey() { CountingQueryEngine.OverlayType.Set)); assertQueryReturned("coll/doc2", "coll/doc3"); - query = query("coll").filter(filter("key", "<=", FieldValue.minKey())); + query = query("coll").filter(filter("key", "<=", MinKey.instance())); executeQuery(query); assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); assertOverlayTypes( @@ -930,13 +923,13 @@ public void testIndexesMinKey() { CountingQueryEngine.OverlayType.Set)); assertQueryReturned("coll/doc2", "coll/doc3"); - query = query("coll").filter(filter("key", ">", FieldValue.minKey())); + query = query("coll").filter(filter("key", ">", MinKey.instance())); executeQuery(query); assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); assertOverlayTypes(keyMap()); assertQueryReturned(); - query = query("coll").filter(filter("key", "<", FieldValue.minKey())); + query = query("coll").filter(filter("key", "<", MinKey.instance())); executeQuery(query); assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); assertOverlayTypes(keyMap()); @@ -944,7 +937,7 @@ public void testIndexesMinKey() { query = query("coll") - .filter(filter("key", "in", Arrays.asList(FieldValue.minKey(), FieldValue.maxKey()))); + .filter(filter("key", "in", Arrays.asList(MinKey.instance(), MaxKey.instance()))); executeQuery(query); assertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); assertOverlayTypes( @@ -965,10 +958,10 @@ public void testIndexesMaxKey() { configureFieldIndexes(singletonList(index)); writeMutation(setMutation("coll/doc1", map("key", null))); - writeMutation(setMutation("coll/doc2", map("key", FieldValue.minKey()))); + writeMutation(setMutation("coll/doc2", map("key", MinKey.instance()))); writeMutation(setMutation("coll/doc3", map("key", 1))); - writeMutation(setMutation("coll/doc4", map("key", FieldValue.maxKey()))); - writeMutation(setMutation("coll/doc5", map("key", FieldValue.maxKey()))); + writeMutation(setMutation("coll/doc4", map("key", MaxKey.instance()))); + writeMutation(setMutation("coll/doc5", map("key", MaxKey.instance()))); backfillIndexes(); @@ -989,7 +982,7 @@ public void testIndexesMaxKey() { CountingQueryEngine.OverlayType.Set)); assertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3", "coll/doc4", "coll/doc5"); - query = query("coll").filter(filter("key", "==", FieldValue.maxKey())); + query = query("coll").filter(filter("key", "==", MaxKey.instance())); executeQuery(query); assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); assertOverlayTypes( @@ -1000,7 +993,7 @@ public void testIndexesMaxKey() { CountingQueryEngine.OverlayType.Set)); assertQueryReturned("coll/doc4", "coll/doc5"); - query = query("coll").filter(filter("key", "!=", FieldValue.maxKey())); + query = query("coll").filter(filter("key", "!=", MaxKey.instance())); executeQuery(query); assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); assertOverlayTypes( @@ -1011,7 +1004,7 @@ public void testIndexesMaxKey() { CountingQueryEngine.OverlayType.Set)); assertQueryReturned("coll/doc2", "coll/doc3"); - query = query("coll").filter(filter("key", ">=", FieldValue.maxKey())); + query = query("coll").filter(filter("key", ">=", MaxKey.instance())); executeQuery(query); assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); assertOverlayTypes( @@ -1022,7 +1015,7 @@ public void testIndexesMaxKey() { CountingQueryEngine.OverlayType.Set)); assertQueryReturned("coll/doc4", "coll/doc5"); - query = query("coll").filter(filter("key", "<=", FieldValue.maxKey())); + query = query("coll").filter(filter("key", "<=", MaxKey.instance())); 
executeQuery(query); assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); assertOverlayTypes( @@ -1033,13 +1026,13 @@ public void testIndexesMaxKey() { CountingQueryEngine.OverlayType.Set)); assertQueryReturned("coll/doc4", "coll/doc5"); - query = query("coll").filter(filter("key", ">", FieldValue.maxKey())); + query = query("coll").filter(filter("key", ">", MaxKey.instance())); executeQuery(query); assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); assertOverlayTypes(keyMap()); assertQueryReturned(); - query = query("coll").filter(filter("key", "<", FieldValue.maxKey())); + query = query("coll").filter(filter("key", "<", MaxKey.instance())); executeQuery(query); assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); assertOverlayTypes(keyMap()); @@ -1052,22 +1045,22 @@ public void testIndexesAllBsonTypesTogether() { fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.DESCENDING); configureFieldIndexes(singletonList(index)); - writeMutation(setMutation("coll/doc1", map("key", FieldValue.minKey()))); - writeMutation(setMutation("coll/doc2", map("key", FieldValue.int32(2)))); - writeMutation(setMutation("coll/doc3", map("key", FieldValue.int32(1)))); - writeMutation(setMutation("coll/doc4", map("key", FieldValue.bsonTimestamp(1000, 1001)))); - writeMutation(setMutation("coll/doc5", map("key", FieldValue.bsonTimestamp(1000, 1000)))); + writeMutation(setMutation("coll/doc1", map("key", MinKey.instance()))); + writeMutation(setMutation("coll/doc2", map("key", new Int32Value(2)))); + writeMutation(setMutation("coll/doc3", map("key", new Int32Value(1)))); + writeMutation(setMutation("coll/doc4", map("key", new BsonTimestamp(1000, 1001)))); + writeMutation(setMutation("coll/doc5", map("key", new BsonTimestamp(1000, 1000)))); writeMutation( - setMutation("coll/doc6", map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 4})))); + setMutation("coll/doc6", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4})))); writeMutation( - setMutation("coll/doc7", map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3})))); + setMutation("coll/doc7", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})))); writeMutation( - setMutation("coll/doc8", map("key", FieldValue.bsonObjectId("507f191e810c19729de860eb")))); + setMutation("coll/doc8", map("key", new BsonObjectId("507f191e810c19729de860eb")))); writeMutation( - setMutation("coll/doc9", map("key", FieldValue.bsonObjectId("507f191e810c19729de860ea")))); - writeMutation(setMutation("coll/doc10", map("key", FieldValue.regex("^bar", "m")))); - writeMutation(setMutation("coll/doc11", map("key", FieldValue.regex("^bar", "i")))); - writeMutation(setMutation("coll/doc12", map("key", FieldValue.maxKey()))); + setMutation("coll/doc9", map("key", new BsonObjectId("507f191e810c19729de860ea")))); + writeMutation(setMutation("coll/doc10", map("key", new RegexValue("^bar", "m")))); + writeMutation(setMutation("coll/doc11", map("key", new RegexValue("^bar", "i")))); + writeMutation(setMutation("coll/doc12", map("key", MaxKey.instance()))); backfillIndexes(); @@ -1122,27 +1115,27 @@ public void testIndexesAllTypesTogether() { configureFieldIndexes(singletonList(index)); writeMutation(setMutation("coll/doc1", map("key", null))); - writeMutation(setMutation("coll/doc2", map("key", FieldValue.minKey()))); + writeMutation(setMutation("coll/doc2", map("key", MinKey.instance()))); writeMutation(setMutation("coll/doc3", map("key", true))); writeMutation(setMutation("coll/doc4", map("key", Double.NaN))); - 
writeMutation(setMutation("coll/doc5", map("key", FieldValue.int32(1)))); + writeMutation(setMutation("coll/doc5", map("key", new Int32Value(1)))); writeMutation(setMutation("coll/doc6", map("key", 2.0))); writeMutation(setMutation("coll/doc7", map("key", 3))); writeMutation(setMutation("coll/doc8", map("key", new Timestamp(100, 123456000)))); - writeMutation(setMutation("coll/doc9", map("key", FieldValue.bsonTimestamp(1, 2)))); + writeMutation(setMutation("coll/doc9", map("key", new BsonTimestamp(1, 2)))); writeMutation(setMutation("coll/doc10", map("key", "string"))); writeMutation(setMutation("coll/doc11", map("key", blob(1, 2, 3)))); writeMutation( - setMutation("coll/doc12", map("key", FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3})))); + setMutation("coll/doc12", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})))); writeMutation(setMutation("coll/doc13", map("key", ref("foo/bar")))); writeMutation( - setMutation("coll/doc14", map("key", FieldValue.bsonObjectId("507f191e810c19729de860ea")))); + setMutation("coll/doc14", map("key", new BsonObjectId("507f191e810c19729de860ea")))); writeMutation(setMutation("coll/doc15", map("key", new GeoPoint(1, 2)))); - writeMutation(setMutation("coll/doc16", map("key", FieldValue.regex("^bar", "m")))); + writeMutation(setMutation("coll/doc16", map("key", new RegexValue("^bar", "m")))); writeMutation(setMutation("coll/doc17", map("key", Arrays.asList(2, "foo")))); writeMutation(setMutation("coll/doc18", map("key", FieldValue.vector(new double[] {1, 2, 3})))); writeMutation(setMutation("coll/doc19", map("key", map("bar", 1, "foo", 2)))); - writeMutation(setMutation("coll/doc20", map("key", FieldValue.maxKey()))); + writeMutation(setMutation("coll/doc20", map("key", MaxKey.instance()))); backfillIndexes(); diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/model/ValuesTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/model/ValuesTest.java index f4d83b2be91..66b0ff937d7 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/model/ValuesTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/model/ValuesTest.java @@ -87,15 +87,15 @@ public void testValueEquality() { BsonTimestamp bsonTimestamp2 = new BsonTimestamp(1, 3); BsonTimestamp bsonTimestamp3 = new BsonTimestamp(2, 2); - Int32Value int32Value1 = FieldValue.int32(1); - Int32Value int32Value2 = FieldValue.int32(2); + Int32Value int32Value1 = new Int32Value(1); + Int32Value int32Value2 = new Int32Value(2); - RegexValue regexValue1 = FieldValue.regex("^foo", "i"); - RegexValue regexValue2 = FieldValue.regex("^foo", "m"); - RegexValue regexValue3 = FieldValue.regex("^bar", "i"); + RegexValue regexValue1 = new RegexValue("^foo", "i"); + RegexValue regexValue2 = new RegexValue("^foo", "m"); + RegexValue regexValue3 = new RegexValue("^bar", "i"); - MinKey minKey = FieldValue.minKey(); - MaxKey maxKey = FieldValue.maxKey(); + MinKey minKey = MinKey.instance(); + MaxKey maxKey = MaxKey.instance(); new EqualsTester() .addEqualityGroup(wrap(true), wrap(true)) @@ -150,13 +150,13 @@ public void testValueEquality() { .addEqualityGroup(wrap(new BsonTimestamp(1, 2)), wrap(bsonTimestamp1)) .addEqualityGroup(wrap(new BsonTimestamp(1, 3)), wrap(bsonTimestamp2)) .addEqualityGroup(wrap(new BsonTimestamp(2, 2)), wrap(bsonTimestamp3)) - .addEqualityGroup(wrap(FieldValue.int32(1)), wrap(int32Value1)) - .addEqualityGroup(wrap(FieldValue.int32(2)), wrap(int32Value2)) - .addEqualityGroup(wrap(FieldValue.regex("^foo", 
"i")), wrap(regexValue1)) - .addEqualityGroup(wrap(FieldValue.regex("^foo", "m")), wrap(regexValue2)) - .addEqualityGroup(wrap(FieldValue.regex("^bar", "i")), wrap(regexValue3)) - .addEqualityGroup(wrap(FieldValue.minKey()), wrap(minKey)) - .addEqualityGroup(wrap(FieldValue.maxKey()), wrap(maxKey)) + .addEqualityGroup(wrap(new Int32Value(1)), wrap(int32Value1)) + .addEqualityGroup(wrap(new Int32Value(2)), wrap(int32Value2)) + .addEqualityGroup(wrap(new RegexValue("^foo", "i")), wrap(regexValue1)) + .addEqualityGroup(wrap(new RegexValue("^foo", "m")), wrap(regexValue2)) + .addEqualityGroup(wrap(new RegexValue("^bar", "i")), wrap(regexValue3)) + .addEqualityGroup(wrap(MinKey.instance()), wrap(minKey)) + .addEqualityGroup(wrap(MaxKey.instance()), wrap(maxKey)) .testEquals(); } @@ -170,7 +170,7 @@ public void testValueOrdering() { .addEqualityGroup(wrap((Object) null)) // MinKey is after null - .addEqualityGroup(wrap(FieldValue.minKey()), wrap(MinKey.instance())) + .addEqualityGroup(wrap(MinKey.instance())) // booleans .addEqualityGroup(wrap(false)) @@ -181,27 +181,20 @@ public void testValueOrdering() { .addEqualityGroup(wrap(Double.NEGATIVE_INFINITY)) .addEqualityGroup(wrap(-Double.MAX_VALUE)) .addEqualityGroup(wrap(Long.MIN_VALUE)) - .addEqualityGroup( - wrap(new Int32Value(-2147483648)), - wrap(FieldValue.int32(-2147483648)), - wrap(Integer.MIN_VALUE)) + .addEqualityGroup(wrap(new Int32Value(-2147483648)), wrap(Integer.MIN_VALUE)) .addEqualityGroup(wrap(-1.1)) .addEqualityGroup(wrap(-1.0)) .addEqualityGroup(wrap(-Double.MIN_NORMAL)) .addEqualityGroup(wrap(-Double.MIN_VALUE)) // Zeros all compare the same. - .addEqualityGroup( - wrap(-0.0), wrap(0.0), wrap(0L), wrap(new Int32Value(0)), wrap(FieldValue.int32(0))) + .addEqualityGroup(wrap(-0.0), wrap(0.0), wrap(0L), wrap(new Int32Value(0))) .addEqualityGroup(wrap(Double.MIN_VALUE)) .addEqualityGroup(wrap(Double.MIN_NORMAL)) .addEqualityGroup(wrap(0.1)) // Doubles, Longs, Int32Values compareTo() the same. 
.addEqualityGroup(wrap(1.0), wrap(1L), wrap(new Int32Value(1))) .addEqualityGroup(wrap(1.1)) - .addEqualityGroup( - wrap(new Int32Value(2147483647)), - wrap(FieldValue.int32(2147483647)), - wrap(Integer.MAX_VALUE)) + .addEqualityGroup(wrap(new Int32Value(2147483647)), wrap(Integer.MAX_VALUE)) .addEqualityGroup(wrap(Long.MAX_VALUE)) .addEqualityGroup(wrap(Double.MAX_VALUE)) .addEqualityGroup(wrap(Double.POSITIVE_INFINITY)) @@ -211,7 +204,7 @@ public void testValueOrdering() { .addEqualityGroup(wrap(date2)) // bson timestamps - .addEqualityGroup(wrap(new BsonTimestamp(123, 4)), wrap(FieldValue.bsonTimestamp(123, 4))) + .addEqualityGroup(wrap(new BsonTimestamp(123, 4))) .addEqualityGroup(wrap(new BsonTimestamp(123, 5))) .addEqualityGroup(wrap(new BsonTimestamp(124, 0))) @@ -241,8 +234,7 @@ public void testValueOrdering() { // bson binary data .addEqualityGroup( wrap(BsonBinaryData.fromBytes(1, new byte[] {})), - wrap(BsonBinaryData.fromByteString(1, ByteString.EMPTY)), - wrap(FieldValue.bsonBinaryData(1, new byte[] {}))) + wrap(BsonBinaryData.fromByteString(1, ByteString.EMPTY))) .addEqualityGroup(wrap(BsonBinaryData.fromBytes(1, new byte[] {0}))) .addEqualityGroup(wrap(BsonBinaryData.fromBytes(5, new byte[] {1, 2}))) .addEqualityGroup(wrap(BsonBinaryData.fromBytes(5, new byte[] {1, 2, 3}))) @@ -257,9 +249,7 @@ public void testValueOrdering() { .addEqualityGroup(wrap(wrapRef(dbId("p2", "d1"), key("c1/doc1")))) // bson object id - .addEqualityGroup( - wrap(new BsonObjectId("507f191e810c19729de860ea")), - wrap(FieldValue.bsonObjectId("507f191e810c19729de860ea"))) + .addEqualityGroup(wrap(new BsonObjectId("507f191e810c19729de860ea"))) .addEqualityGroup(wrap(new BsonObjectId("507f191e810c19729de860eb"))) // latin small letter e + combining acute accent + latin small letter b .addEqualityGroup(wrap(new BsonObjectId("e\u0301b"))) @@ -282,7 +272,7 @@ public void testValueOrdering() { .addEqualityGroup(wrap(new GeoPoint(90, 180))) // regex - .addEqualityGroup(wrap(new RegexValue("^foo", "i")), wrap(FieldValue.regex("^foo", "i"))) + .addEqualityGroup(wrap(new RegexValue("^foo", "i"))) .addEqualityGroup(wrap(new RegexValue("^foo", "m"))) .addEqualityGroup(wrap(new RegexValue("^zoo", "i"))) // latin small letter e + combining acute accent + latin small letter b @@ -311,7 +301,7 @@ public void testValueOrdering() { .addEqualityGroup(wrap(map("foo", "0"))) // MaxKey is last - .addEqualityGroup(wrap(FieldValue.maxKey()), wrap(MaxKey.instance())) + .addEqualityGroup(wrap(MaxKey.instance())) .testCompare(); } @@ -323,7 +313,7 @@ public void testLowerBound() { // lower bound of MinKey is MinKey .addEqualityGroup( - wrap(getLowerBound(TestUtil.wrap(FieldValue.minKey()))), wrap(FieldValue.minKey())) + wrap(getLowerBound(TestUtil.wrap(MinKey.instance()))), wrap(MinKey.instance())) // booleans .addEqualityGroup(wrap(false), wrap(getLowerBound(TestUtil.wrap(true)))) @@ -333,7 +323,7 @@ public void testLowerBound() { .addEqualityGroup( wrap(getLowerBound(TestUtil.wrap(1.0))), wrap(Double.NaN), - wrap(getLowerBound(TestUtil.wrap(FieldValue.int32(1))))) + wrap(getLowerBound(TestUtil.wrap(new Int32Value(1))))) .addEqualityGroup(wrap(Double.NEGATIVE_INFINITY)) .addEqualityGroup(wrap(Long.MIN_VALUE)) @@ -380,9 +370,9 @@ public void testLowerBound() { // regular expressions .addEqualityGroup( - wrap(getLowerBound(TestUtil.wrap(FieldValue.regex("^foo", "i")))), - wrap(FieldValue.regex("", ""))) - .addEqualityGroup(wrap(FieldValue.regex("^foo", "i"))) + wrap(getLowerBound(TestUtil.wrap(new RegexValue("^foo", "i")))), + 
wrap(new RegexValue("", ""))) + .addEqualityGroup(wrap(new RegexValue("^foo", "i"))) // arrays .addEqualityGroup( @@ -404,7 +394,7 @@ public void testLowerBound() { .addEqualityGroup(wrap(getLowerBound(TestUtil.wrap(map("foo", "bar")))), wrap(map())) // maxKey - .addEqualityGroup(wrap(FieldValue.maxKey())) + .addEqualityGroup(wrap(MaxKey.instance())) .testCompare(); } @@ -416,19 +406,17 @@ public void testUpperBound() { // upper value of null is MinKey .addEqualityGroup( - wrap(getUpperBound(TestUtil.wrap((Object) null))), - wrap(FieldValue.minKey()), - wrap(MinKey.instance())) + wrap(getUpperBound(TestUtil.wrap((Object) null))), wrap(MinKey.instance())) // upper value of MinKey is boolean `false` - .addEqualityGroup(wrap(false), wrap(getUpperBound(TestUtil.wrap(FieldValue.minKey())))) + .addEqualityGroup(wrap(false), wrap(getUpperBound(TestUtil.wrap(MinKey.instance())))) // booleans .addEqualityGroup(wrap(true)) .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(false)))) // numbers - .addEqualityGroup(wrap(FieldValue.int32(2147483647))) // largest int32 value + .addEqualityGroup(wrap(new Int32Value(2147483647))) // largest int32 value .addEqualityGroup(wrap(Long.MAX_VALUE)) .addEqualityGroup(wrap(Double.POSITIVE_INFINITY)) .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(1.0)))) @@ -468,8 +456,8 @@ public void testUpperBound() { .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(new GeoPoint(90, 180))))) // regular expressions - .addEqualityGroup(wrap(FieldValue.regex("^foo", "i"))) - .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(FieldValue.regex("", ""))))) + .addEqualityGroup(wrap(new RegexValue("^foo", "i"))) + .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(new RegexValue("", ""))))) // arrays .addEqualityGroup(wrap(Collections.singletonList(false))) @@ -489,9 +477,7 @@ public void testUpperBound() { // upper value of objects is MaxKey .addEqualityGroup( - wrap(getUpperBound(TestUtil.wrap(map("a", "b")))), - wrap(FieldValue.maxKey()), - wrap(MaxKey.instance())) + wrap(getUpperBound(TestUtil.wrap(map("a", "b")))), wrap(MaxKey.instance())) .testCompare(); } @@ -513,21 +499,19 @@ public void testCanonicalIds() { TestUtil.wrap(map("a", Arrays.asList("b", map("c", new GeoPoint(30, 60))))), "{a:[b,{c:geo(30.0,60.0)}]}"); - assertCanonicalId( - TestUtil.wrap(FieldValue.regex("a", "b")), "{__regex__:{options:b,pattern:a}}"); + assertCanonicalId(TestUtil.wrap(new RegexValue("a", "b")), "{__regex__:{options:b,pattern:a}}"); - assertCanonicalId(TestUtil.wrap(FieldValue.bsonObjectId("foo")), "{__oid__:foo}"); + assertCanonicalId(TestUtil.wrap(new BsonObjectId("foo")), "{__oid__:foo}"); assertCanonicalId( - TestUtil.wrap(FieldValue.bsonTimestamp(1, 2)), - "{__request_timestamp__:{increment:2,seconds:1}}"); - assertCanonicalId((TestUtil.wrap(FieldValue.int32(1))), "{__int__:1}"); + TestUtil.wrap(new BsonTimestamp(1, 2)), "{__request_timestamp__:{increment:2,seconds:1}}"); + assertCanonicalId((TestUtil.wrap(new Int32Value(1))), "{__int__:1}"); assertCanonicalId( - TestUtil.wrap(FieldValue.bsonBinaryData(1, new byte[] {1, 2, 3})), "{__binary__:01010203}"); + TestUtil.wrap(BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})), "{__binary__:01010203}"); assertCanonicalId( - TestUtil.wrap(FieldValue.bsonBinaryData(128, new byte[] {1, 2, 3})), + TestUtil.wrap(BsonBinaryData.fromBytes(128, new byte[] {1, 2, 3})), "{__binary__:80010203}"); - assertCanonicalId(TestUtil.wrap(FieldValue.minKey()), "{__min__:null}"); - assertCanonicalId(TestUtil.wrap(FieldValue.maxKey()), "{__max__:null}"); + 
assertCanonicalId(TestUtil.wrap(MinKey.instance()), "{__min__:null}"); + assertCanonicalId(TestUtil.wrap(MaxKey.instance()), "{__max__:null}"); } @Test @@ -542,14 +526,14 @@ private void assertCanonicalId(Value proto, String expectedCanonicalId) { @Test public void DetectsBsonTypesCorrectly() { - Value minKeyValue = TestUtil.wrap(FieldValue.minKey()); - Value maxKeyValue = TestUtil.wrap(FieldValue.maxKey()); - Value int32Value = TestUtil.wrap(FieldValue.int32(1)); - Value regexValue = TestUtil.wrap(FieldValue.regex("^foo", "i")); - Value bsonTimestampValue = TestUtil.wrap(FieldValue.bsonTimestamp(1, 2)); - Value bsonObjectIdValue = TestUtil.wrap(FieldValue.bsonObjectId("foo")); - Value bsonBinaryDataValue1 = TestUtil.wrap(FieldValue.bsonBinaryData(1, new byte[] {})); - Value bsonBinaryDataValue2 = TestUtil.wrap(FieldValue.bsonBinaryData(1, new byte[] {1, 2, 4})); + Value minKeyValue = TestUtil.wrap(MinKey.instance()); + Value maxKeyValue = TestUtil.wrap(MaxKey.instance()); + Value int32Value = TestUtil.wrap(new Int32Value(1)); + Value regexValue = TestUtil.wrap(new RegexValue("^foo", "i")); + Value bsonTimestampValue = TestUtil.wrap(new BsonTimestamp(1, 2)); + Value bsonObjectIdValue = TestUtil.wrap(new BsonObjectId("foo")); + Value bsonBinaryDataValue1 = TestUtil.wrap(BsonBinaryData.fromBytes(1, new byte[] {})); + Value bsonBinaryDataValue2 = TestUtil.wrap(BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4})); assertTrue(Values.isMinKey(minKeyValue.getMapValue().getFieldsMap())); assertFalse(Values.isMinKey(maxKeyValue.getMapValue().getFieldsMap())); diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/remote/RemoteSerializerTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/remote/RemoteSerializerTest.java index 37cf187bc07..26f665b38f2 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/remote/RemoteSerializerTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/remote/RemoteSerializerTest.java @@ -39,9 +39,15 @@ import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; +import com.google.firebase.firestore.BsonBinaryData; +import com.google.firebase.firestore.BsonObjectId; +import com.google.firebase.firestore.BsonTimestamp; import com.google.firebase.firestore.DocumentReference; import com.google.firebase.firestore.FieldValue; import com.google.firebase.firestore.GeoPoint; +import com.google.firebase.firestore.MaxKey; +import com.google.firebase.firestore.MinKey; +import com.google.firebase.firestore.RegexValue; import com.google.firebase.firestore.core.ArrayContainsAnyFilter; import com.google.firebase.firestore.core.FieldFilter; import com.google.firebase.firestore.core.InFilter; @@ -331,7 +337,7 @@ public void testEncodesVectorValue() { @Test public void testEncodesBsonObjectIds() { - Value model = wrap(FieldValue.bsonObjectId("foo")); + Value model = wrap(new BsonObjectId("foo")); Value proto = Value.newBuilder() @@ -346,7 +352,7 @@ public void testEncodesBsonObjectIds() { @Test public void testEncodesBsonTimestamps() { - Value model = wrap(FieldValue.bsonTimestamp(12345, 67)); + Value model = wrap(new BsonTimestamp(12345, 67)); Value proto = Value.newBuilder() @@ -372,7 +378,7 @@ public void testEncodesBsonTimestamps() { @Test public void testEncodesBsonBinaryData() { - Value model = wrap(FieldValue.bsonBinaryData(127, new byte[] {1, 2, 3})); + Value model = wrap(BsonBinaryData.fromBytes(127, new byte[] {1, 2, 3})); Value proto = Value.newBuilder() @@ -391,7 
+397,7 @@ public void testEncodesBsonBinaryData() { @Test public void testEncodesRegexValues() { - Value model = wrap(FieldValue.regex("^foo", "i")); + Value model = wrap(new RegexValue("^foo", "i")); Value proto = Value.newBuilder() .setMapValue( @@ -416,7 +422,7 @@ public void testEncodesRegexValues() { @Test public void testEncodesInt32Values() { - Value model = wrap(FieldValue.int32(12345)); + Value model = wrap(new com.google.firebase.firestore.Int32Value(12345)); Value proto = Value.newBuilder() @@ -431,7 +437,7 @@ public void testEncodesInt32Values() { @Test public void testEncodesMinKey() { - Value model = wrap(FieldValue.minKey()); + Value model = wrap(MinKey.instance()); Value proto = Value.newBuilder() @@ -447,7 +453,7 @@ public void testEncodesMinKey() { @Test public void testEncodesMaxKey() { - Value model = wrap(FieldValue.maxKey()); + Value model = wrap(MaxKey.instance()); Value proto = Value.newBuilder() From 31e32906ad24e1d7c4ebe04f75e5087899d3ff61 Mon Sep 17 00:00:00 2001 From: Ehsan Nasiri Date: Fri, 2 May 2025 14:03:53 -0700 Subject: [PATCH 4/8] Fix merge conflicts. --- .../firebase/firestore/BsonTypesTest.java | 48 +++++++++---------- .../google/firebase/firestore/TypeTest.java | 2 +- 2 files changed, 25 insertions(+), 25 deletions(-) diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java index 2dad1a7a678..1d45da5312a 100644 --- a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java @@ -272,14 +272,14 @@ public void filterAndOrderBsonObjectIds() throws Exception { .orderBy("key", Direction.DESCENDING) .whereGreaterThan("key", new BsonObjectId("507f191e810c19729de860ea")); - assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "b")); + assertSDKQueryResultsConsistentWithBackend(randomColl, orderedQuery, docs, Arrays.asList("c", "b")); orderedQuery = randomColl .orderBy("key", Direction.DESCENDING) .whereNotEqualTo("key", new BsonObjectId("507f191e810c19729de860eb")); - assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "a")); + assertSDKQueryResultsConsistentWithBackend(randomColl, orderedQuery, docs, Arrays.asList("c", "a")); } @Test @@ -302,14 +302,14 @@ public void filterAndOrderBsonTimestamps() throws Exception { .orderBy("key", Direction.DESCENDING) .whereGreaterThan("key", new BsonTimestamp(1, 1)); - assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "b")); + assertSDKQueryResultsConsistentWithBackend(randomColl, orderedQuery, docs, Arrays.asList("c", "b")); orderedQuery = randomColl .orderBy("key", Direction.DESCENDING) .whereNotEqualTo("key", new BsonTimestamp(1, 2)); - assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "a")); + assertSDKQueryResultsConsistentWithBackend(randomColl, orderedQuery, docs, Arrays.asList("c", "a")); } @Test @@ -332,14 +332,14 @@ public void filterAndOrderBsonBinaryData() throws Exception { .orderBy("key", Direction.DESCENDING) .whereGreaterThan("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})); - assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "b")); + assertSDKQueryResultsConsistentWithBackend(randomColl, orderedQuery, docs, Arrays.asList("c", "b")); orderedQuery = randomColl .orderBy("key", 
Direction.DESCENDING) .whereNotEqualTo("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4})); - assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "a")); + assertSDKQueryResultsConsistentWithBackend(randomColl, orderedQuery, docs, Arrays.asList("c", "a")); } @Test @@ -359,14 +359,14 @@ public void filterAndOrderRegex() throws Exception { .orderBy("key", Direction.DESCENDING) .whereGreaterThan("key", new RegexValue("^bar", "i")); - assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "b")); + assertSDKQueryResultsConsistentWithBackend(randomColl, orderedQuery, docs, Arrays.asList("c", "b")); orderedQuery = randomColl .orderBy("key", Direction.DESCENDING) .whereNotEqualTo("key", new RegexValue("^bar", "m")); - assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "a")); + assertSDKQueryResultsConsistentWithBackend(randomColl, orderedQuery, docs, Arrays.asList("c", "a")); } @Test @@ -384,12 +384,12 @@ public void filterAndOrderInt32() throws Exception { Query orderedQuery = randomColl.orderBy("key", Direction.DESCENDING).whereGreaterThan("key", new Int32Value(-1)); - assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "b")); + assertSDKQueryResultsConsistentWithBackend(randomColl, orderedQuery, docs, Arrays.asList("c", "b")); orderedQuery = randomColl.orderBy("key", Direction.DESCENDING).whereNotEqualTo("key", new Int32Value(1)); - assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, Arrays.asList("c", "a")); + assertSDKQueryResultsConsistentWithBackend(randomColl, orderedQuery, docs, Arrays.asList("c", "a")); } @Test @@ -414,23 +414,23 @@ public void filterAndOrderMinKey() throws Exception { .DESCENDING) // minKeys are equal, would sort by documentId as secondary order .whereEqualTo("key", MinKey.instance()); - assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList("b", "a")); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("b", "a")); // TODO(Mila/BSON): uncomment this test when null value inclusion is fixed // query = randomColl.whereNotEqualTo("key", MinKey.instance()); // assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList("d", "e")); query = randomColl.whereGreaterThanOrEqualTo("key", MinKey.instance()); - assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList("a", "b")); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("a", "b")); query = randomColl.whereLessThanOrEqualTo("key", MinKey.instance()); - assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList("a", "b")); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("a", "b")); query = randomColl.whereGreaterThan("key", MinKey.instance()); - assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList()); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList()); query = randomColl.whereGreaterThan("key", MinKey.instance()); - assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList()); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList()); } @Test @@ -455,23 +455,23 @@ public void filterAndOrderMaxKey() throws Exception { .DESCENDING) // maxKeys are equal, would sort by documentId as secondary order .whereEqualTo("key", MaxKey.instance()); - assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList("d", "c")); + 
assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("d", "c")); // TODO(Mila/BSON): uncomment this test when null value inclusion is fixed // query = randomColl.whereNotEqualTo("key", MaxKey.instance()); - // assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList("a", "b")); + // assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("a", "b")); query = randomColl.whereGreaterThanOrEqualTo("key", MaxKey.instance()); - assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList("c", "d")); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("c", "d")); query = randomColl.whereLessThanOrEqualTo("key", MaxKey.instance()); - assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList("c", "d")); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("c", "d")); query = randomColl.whereLessThan("key", MaxKey.instance()); - assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList()); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList()); query = randomColl.whereGreaterThan("key", MaxKey.instance()); - assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList()); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList()); } @Test @@ -489,10 +489,10 @@ public void filterNullValueWithBsonTypes() throws Exception { waitFor(randomColl.get()); Query query = randomColl.whereEqualTo("key", null); - assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList("b", "c")); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("b", "c")); query = randomColl.whereNotEqualTo("key", null); - assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList("a", "d", "e")); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("a", "d", "e")); } @Test @@ -565,7 +565,7 @@ public void orderBsonTypesTogether() throws Exception { "minKey2", "minKey1"); - assertSDKQueryResultsConsistentWithBackend(orderedQuery, docs, expectedDocs); + assertSDKQueryResultsConsistentWithBackend(randomColl, orderedQuery, docs, expectedDocs); } @Test diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/TypeTest.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/TypeTest.java index 2d2fa073599..13ec0646f5a 100644 --- a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/TypeTest.java +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/TypeTest.java @@ -468,6 +468,6 @@ public void snapshotListenerSortsDifferentTypesSameAsServer() throws Exception { "max"); // Assert that get and snapshot listener requests sort docs in the same, expected order - assertSDKQueryResultsConsistentWithBackend(orderedQuery, testDocs, expectedDocs); + assertSDKQueryResultsConsistentWithBackend(colRef, orderedQuery, testDocs, expectedDocs); } } From 6f7c203bf574d193a7ce3d86d56c28e5c7350abd Mon Sep 17 00:00:00 2001 From: Ehsan Nasiri Date: Fri, 2 May 2025 14:20:38 -0700 Subject: [PATCH 5/8] Fix formatting. 
--- .../firebase/firestore/BsonTypesTest.java | 33 ++++++++++++------- .../testutil/IntegrationTestUtil.java | 4 ++- 2 files changed, 25 insertions(+), 12 deletions(-) diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java index 1d45da5312a..32340e39886 100644 --- a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java @@ -272,14 +272,16 @@ public void filterAndOrderBsonObjectIds() throws Exception { .orderBy("key", Direction.DESCENDING) .whereGreaterThan("key", new BsonObjectId("507f191e810c19729de860ea")); - assertSDKQueryResultsConsistentWithBackend(randomColl, orderedQuery, docs, Arrays.asList("c", "b")); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "b")); orderedQuery = randomColl .orderBy("key", Direction.DESCENDING) .whereNotEqualTo("key", new BsonObjectId("507f191e810c19729de860eb")); - assertSDKQueryResultsConsistentWithBackend(randomColl, orderedQuery, docs, Arrays.asList("c", "a")); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "a")); } @Test @@ -302,14 +304,16 @@ public void filterAndOrderBsonTimestamps() throws Exception { .orderBy("key", Direction.DESCENDING) .whereGreaterThan("key", new BsonTimestamp(1, 1)); - assertSDKQueryResultsConsistentWithBackend(randomColl, orderedQuery, docs, Arrays.asList("c", "b")); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "b")); orderedQuery = randomColl .orderBy("key", Direction.DESCENDING) .whereNotEqualTo("key", new BsonTimestamp(1, 2)); - assertSDKQueryResultsConsistentWithBackend(randomColl, orderedQuery, docs, Arrays.asList("c", "a")); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "a")); } @Test @@ -332,14 +336,16 @@ public void filterAndOrderBsonBinaryData() throws Exception { .orderBy("key", Direction.DESCENDING) .whereGreaterThan("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})); - assertSDKQueryResultsConsistentWithBackend(randomColl, orderedQuery, docs, Arrays.asList("c", "b")); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "b")); orderedQuery = randomColl .orderBy("key", Direction.DESCENDING) .whereNotEqualTo("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4})); - assertSDKQueryResultsConsistentWithBackend(randomColl, orderedQuery, docs, Arrays.asList("c", "a")); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "a")); } @Test @@ -359,14 +365,16 @@ public void filterAndOrderRegex() throws Exception { .orderBy("key", Direction.DESCENDING) .whereGreaterThan("key", new RegexValue("^bar", "i")); - assertSDKQueryResultsConsistentWithBackend(randomColl, orderedQuery, docs, Arrays.asList("c", "b")); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "b")); orderedQuery = randomColl .orderBy("key", Direction.DESCENDING) .whereNotEqualTo("key", new RegexValue("^bar", "m")); - assertSDKQueryResultsConsistentWithBackend(randomColl, orderedQuery, docs, Arrays.asList("c", "a")); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "a")); } @Test @@ -384,12 +392,14 @@ 
public void filterAndOrderInt32() throws Exception { Query orderedQuery = randomColl.orderBy("key", Direction.DESCENDING).whereGreaterThan("key", new Int32Value(-1)); - assertSDKQueryResultsConsistentWithBackend(randomColl, orderedQuery, docs, Arrays.asList("c", "b")); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "b")); orderedQuery = randomColl.orderBy("key", Direction.DESCENDING).whereNotEqualTo("key", new Int32Value(1)); - assertSDKQueryResultsConsistentWithBackend(randomColl, orderedQuery, docs, Arrays.asList("c", "a")); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("c", "a")); } @Test @@ -492,7 +502,8 @@ public void filterNullValueWithBsonTypes() throws Exception { assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("b", "c")); query = randomColl.whereNotEqualTo("key", null); - assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("a", "d", "e")); + assertSDKQueryResultsConsistentWithBackend( + randomColl, query, docs, Arrays.asList("a", "d", "e")); } @Test diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/testutil/IntegrationTestUtil.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/testutil/IntegrationTestUtil.java index cf397db4381..be27c7a5200 100644 --- a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/testutil/IntegrationTestUtil.java +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/testutil/IntegrationTestUtil.java @@ -610,7 +610,9 @@ public static void checkOnlineAndOfflineResultsMatch( // expected result. public static void assertSDKQueryResultsConsistentWithBackend( Query collection, - Query query, Map> allData, List expectedDocIds) + Query query, + Map> allData, + List expectedDocIds) throws Exception { // Check the cache round trip first to make sure cache is properly populated, otherwise the // snapshot listener below will return partial results from previous From 327f2c407c96b4a5357cadd70e7633822a82cbf5 Mon Sep 17 00:00:00 2001 From: Ehsan Nasiri Date: Fri, 2 May 2025 14:28:47 -0700 Subject: [PATCH 6/8] Add changelog. --- firebase-firestore/CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/firebase-firestore/CHANGELOG.md b/firebase-firestore/CHANGELOG.md index 29416bcf9a4..7800f5b1c4a 100644 --- a/firebase-firestore/CHANGELOG.md +++ b/firebase-firestore/CHANGELOG.md @@ -1,5 +1,5 @@ # Unreleased - +* [feature] Add support for the following new types: MinKey, MaxKey, RegexValue, Int32Value, BsonObjectId, BsonTimestamp, and BsonBinaryData. [#6928](//github.com/firebase/firebase-android-sdk/pull/6928) # 25.1.4 * [fixed] Fixed the `null` value handling in `whereNotEqualTo` and `whereNotIn` filters. 
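
The changelog entry above summarizes the public surface this series adds: value classes with plain constructors or factory methods, plus typed getters on DocumentSnapshot. A minimal usage sketch follows; the constructors, factories, and getters are the ones declared in api.txt in this series, while the helper name, the field names, and the assumption that it runs off the main thread (so Tasks.await is legal) are illustrative only, not part of the patch.

import com.google.android.gms.tasks.Tasks;
import com.google.firebase.firestore.BsonBinaryData;
import com.google.firebase.firestore.BsonObjectId;
import com.google.firebase.firestore.BsonTimestamp;
import com.google.firebase.firestore.DocumentReference;
import com.google.firebase.firestore.DocumentSnapshot;
import com.google.firebase.firestore.Int32Value;
import com.google.firebase.firestore.MaxKey;
import com.google.firebase.firestore.MinKey;
import com.google.firebase.firestore.RegexValue;
import java.util.HashMap;
import java.util.Map;

// Hypothetical test-style helper: writes one document containing each new BSON
// type, then reads the fields back through the typed DocumentSnapshot getters.
static void writeAndReadBsonTypes(DocumentReference docRef) throws Exception {
  Map<String, Object> data = new HashMap<>();
  data.put("objectId", new BsonObjectId("507f191e810c19729de860ea"));
  data.put("regex", new RegexValue("^foo", "i"));
  data.put("timestamp", new BsonTimestamp(1, 2));
  data.put("binary", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}));
  data.put("int32", new Int32Value(1));
  data.put("min", MinKey.instance());
  data.put("max", MaxKey.instance());
  Tasks.await(docRef.set(data));

  DocumentSnapshot snap = Tasks.await(docRef.get());
  BsonTimestamp ts = snap.getBsonTimestamp("timestamp");   // BsonTimestamp(1, 2)
  RegexValue pattern = snap.getRegexValue("regex");         // RegexValue("^foo", "i")
  Int32Value count = snap.getInt32Value("int32");           // Int32Value(1)
}

The same values are accepted as filter and orderBy operands (for example, whereGreaterThan("key", new BsonTimestamp(1, 1))), which is what the integration tests in the following patches exercise against both the local cache and the backend.
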
From 4417c2b68743786a36907046b869d460276b2940 Mon Sep 17 00:00:00 2001 From: milaGGL <107142260+milaGGL@users.noreply.github.com> Date: Mon, 5 May 2025 12:40:39 -0400 Subject: [PATCH 7/8] resolve TODOs --- .../firebase/firestore/BsonTypesTest.java | 39 ++----------------- 1 file changed, 4 insertions(+), 35 deletions(-) diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java index 32340e39886..989e86311c1 100644 --- a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java @@ -250,8 +250,6 @@ public void listenToDocumentsWithBsonTypes() throws Throwable { } } - // TODO(Mila/BSON): remove the cache population after updating the - // assertSDKQueryResultsConsistentWithBackend @Test public void filterAndOrderBsonObjectIds() throws Exception { Map> docs = @@ -264,9 +262,6 @@ public void filterAndOrderBsonObjectIds() throws Exception { map("key", new BsonObjectId("507f191e810c19729de860ec"))); CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); - // Pre-populate the cache with all docs - waitFor(randomColl.get()); - Query orderedQuery = randomColl .orderBy("key", Direction.DESCENDING) @@ -296,9 +291,6 @@ public void filterAndOrderBsonTimestamps() throws Exception { map("key", new BsonTimestamp(2, 1))); CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); - // Pre-populate the cache with all docs - waitFor(randomColl.get()); - Query orderedQuery = randomColl .orderBy("key", Direction.DESCENDING) @@ -328,9 +320,6 @@ public void filterAndOrderBsonBinaryData() throws Exception { map("key", BsonBinaryData.fromBytes(2, new byte[] {1, 2, 2}))); CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); - // Pre-populate the cache with all docs - waitFor(randomColl.get()); - Query orderedQuery = randomColl .orderBy("key", Direction.DESCENDING) @@ -357,9 +346,6 @@ public void filterAndOrderRegex() throws Exception { "c", map("key", new RegexValue("^baz", "i"))); CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); - // Pre-populate the cache with all docs - waitFor(randomColl.get()); - Query orderedQuery = randomColl .orderBy("key", Direction.DESCENDING) @@ -386,9 +372,6 @@ public void filterAndOrderInt32() throws Exception { "c", map("key", new Int32Value(2))); CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); - // Pre-populate the cache with all docs - waitFor(randomColl.get()); - Query orderedQuery = randomColl.orderBy("key", Direction.DESCENDING).whereGreaterThan("key", new Int32Value(-1)); @@ -413,9 +396,6 @@ public void filterAndOrderMinKey() throws Exception { "e", map("key", MaxKey.instance())); CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); - // Pre-populate the cache with all docs - waitFor(randomColl.get()); - Query query = randomColl .orderBy( @@ -426,9 +406,8 @@ public void filterAndOrderMinKey() throws Exception { assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("b", "a")); - // TODO(Mila/BSON): uncomment this test when null value inclusion is fixed - // query = randomColl.whereNotEqualTo("key", MinKey.instance()); - // assertSDKQueryResultsConsistentWithBackend(query, docs, Arrays.asList("d", "e")); + query = randomColl.whereNotEqualTo("key", MinKey.instance()); + 
assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("d", "e")); query = randomColl.whereGreaterThanOrEqualTo("key", MinKey.instance()); assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("a", "b")); @@ -454,9 +433,6 @@ public void filterAndOrderMaxKey() throws Exception { "e", map("key", null)); CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); - // Pre-populate the cache with all docs - waitFor(randomColl.get()); - Query query = randomColl .orderBy( @@ -467,9 +443,8 @@ public void filterAndOrderMaxKey() throws Exception { assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("d", "c")); - // TODO(Mila/BSON): uncomment this test when null value inclusion is fixed - // query = randomColl.whereNotEqualTo("key", MaxKey.instance()); - // assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("a", "b")); + query = randomColl.whereNotEqualTo("key", MaxKey.instance()); + assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("a", "b")); query = randomColl.whereGreaterThanOrEqualTo("key", MaxKey.instance()); assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("c", "d")); @@ -495,9 +470,6 @@ public void filterNullValueWithBsonTypes() throws Exception { "e", map("key", MaxKey.instance())); CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); - // Pre-populate the cache with all docs - waitFor(randomColl.get()); - Query query = randomColl.whereEqualTo("key", null); assertSDKQueryResultsConsistentWithBackend(randomColl, query, docs, Arrays.asList("b", "c")); @@ -550,9 +522,6 @@ public void orderBsonTypesTogether() throws Exception { map("key", MaxKey.instance())); CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); - // Pre-populate the cache with all docs - waitFor(randomColl.get()); - Query orderedQuery = randomColl.orderBy("key", Direction.DESCENDING); List expectedDocs = Arrays.asList( From 6c8ded1ccd4b42e409cb8e81264efb3d3fdd36ed Mon Sep 17 00:00:00 2001 From: Mila <107142260+milaGGL@users.noreply.github.com> Date: Fri, 27 Jun 2025 16:48:02 -0400 Subject: [PATCH 8/8] Add decimal128 support (#579) * initial code * add unit tests * add integration tests * update tests * update tests * Update BsonTypesTest.java * add more tests * fix a bug in comparing decimal128 value * resolve comments * update the comment regarding decimal128 NotIn&NaN test * update the Quadruple class * resolve comments * hide the Quadruple, QuadrupleBuilder from api.txt --- firebase-firestore/api.txt | 6 + .../firebase/firestore/BsonTypesTest.java | 216 ++++- .../google/firebase/firestore/POJOTest.java | 15 +- .../google/firebase/firestore/TypeTest.java | 87 +- .../core/number/NumberComparisonHelper.java | 25 + .../firebase/firestore/Decimal128Value.java | 68 ++ .../firebase/firestore/DocumentSnapshot.java | 12 + .../firebase/firestore/FirebaseFirestore.java | 1 - .../google/firebase/firestore/Quadruple.java | 309 +++++++ .../firebase/firestore/QuadrupleBuilder.java | 822 ++++++++++++++++++ .../firebase/firestore/UserDataReader.java | 11 +- .../firebase/firestore/UserDataWriter.java | 10 +- .../index/FirestoreIndexValueWriter.java | 135 +-- .../firebase/firestore/model/Values.java | 397 +++++---- .../NumericIncrementTransformOperation.java | 10 +- .../firestore/util/CustomClassMapper.java | 14 + .../google/firebase/firestore/util/Util.java | 6 + .../firebase/firestore/BsonTypesTest.java | 123 ++- 
.../firebase/firestore/FieldValueTest.java | 20 + .../firestore/UserDataWriterTest.java | 20 + .../bundle/BundleSerializerTest.java | 12 + .../firebase/firestore/core/QueryTest.java | 5 + .../index/FirestoreIndexValueWriterTest.java | 108 +++ .../local/SQLiteIndexManagerTest.java | 193 ++-- .../firestore/local/SQLiteLocalStoreTest.java | 273 +++++- .../firebase/firestore/model/ValuesTest.java | 208 +++-- .../remote/RemoteSerializerTest.java | 15 + 27 files changed, 2696 insertions(+), 425 deletions(-) create mode 100644 firebase-firestore/src/main/java/com/google/firebase/firestore/Decimal128Value.java create mode 100644 firebase-firestore/src/main/java/com/google/firebase/firestore/Quadruple.java create mode 100644 firebase-firestore/src/main/java/com/google/firebase/firestore/QuadrupleBuilder.java diff --git a/firebase-firestore/api.txt b/firebase-firestore/api.txt index f14627e2905..f71721ba3f5 100644 --- a/firebase-firestore/api.txt +++ b/firebase-firestore/api.txt @@ -75,6 +75,11 @@ package com.google.firebase.firestore { method public String getPath(); } + public final class Decimal128Value { + ctor public Decimal128Value(String); + field public final String! stringValue; + } + public class DocumentChange { method public com.google.firebase.firestore.QueryDocumentSnapshot getDocument(); method public int getNewIndex(); @@ -135,6 +140,7 @@ package com.google.firebase.firestore { method public java.util.Map? getData(com.google.firebase.firestore.DocumentSnapshot.ServerTimestampBehavior); method public java.util.Date? getDate(String); method public java.util.Date? getDate(String, com.google.firebase.firestore.DocumentSnapshot.ServerTimestampBehavior); + method public com.google.firebase.firestore.Decimal128Value? getDecimal128Value(String); method public com.google.firebase.firestore.DocumentReference? getDocumentReference(String); method public Double? getDouble(String); method public com.google.firebase.firestore.GeoPoint? 
getGeoPoint(String); diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java index 989e86311c1..badbbc7b0d5 100644 --- a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/BsonTypesTest.java @@ -19,6 +19,8 @@ import static com.google.firebase.firestore.testutil.IntegrationTestUtil.testCollectionWithDocsOnNightly; import static com.google.firebase.firestore.testutil.IntegrationTestUtil.waitFor; import static com.google.firebase.firestore.testutil.TestUtil.map; +import static java.lang.Double.NaN; +import static java.lang.Double.POSITIVE_INFINITY; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; @@ -54,6 +56,7 @@ public void writeAndReadBsonTypes() throws ExecutionException, InterruptedExcept "bsonTimestamp", new BsonTimestamp(1, 2), "bsonBinary", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}), "int32", new Int32Value(1), + "decimal128", new Decimal128Value("1.2e3"), "minKey", MinKey.instance(), "maxKey", MaxKey.instance()))); @@ -75,6 +78,7 @@ public void writeAndReadBsonTypes() throws ExecutionException, InterruptedExcept expected.put("bsonTimestamp", new BsonTimestamp(1, 3)); expected.put("bsonBinary", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})); expected.put("int32", new Int32Value(2)); + expected.put("decimal128", new Decimal128Value("1.2e3")); expected.put("minKey", MinKey.instance()); expected.put("maxKey", MaxKey.instance()); @@ -85,6 +89,7 @@ public void writeAndReadBsonTypes() throws ExecutionException, InterruptedExcept assertTrue(actual.get("bsonTimestamp") instanceof BsonTimestamp); assertTrue(actual.get("bsonBinary") instanceof BsonBinaryData); assertTrue(actual.get("int32") instanceof Int32Value); + assertTrue(actual.get("decimal128") instanceof Decimal128Value); assertTrue(actual.get("minKey") instanceof MinKey); assertTrue(actual.get("maxKey") instanceof MaxKey); assertEquals(expected, actual.getData()); @@ -101,13 +106,22 @@ public void writeAndReadBsonTypeOffline() throws ExecutionException, Interrupted Map expected = new HashMap<>(); docRef.set( map( - "bsonObjectId", new BsonObjectId("507f191e810c19729de860ea"), - "regex", new RegexValue("^foo", "i"), - "bsonTimestamp", new BsonTimestamp(1, 2), - "bsonBinary", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}), - "int32", new Int32Value(1), - "minKey", MinKey.instance(), - "maxKey", MaxKey.instance())); + "bsonObjectId", + new BsonObjectId("507f191e810c19729de860ea"), + "regex", + new RegexValue("^foo", "i"), + "bsonTimestamp", + new BsonTimestamp(1, 2), + "bsonBinary", + BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}), + "int32", + new Int32Value(1), + "decimal128", + new Decimal128Value("1.2e3"), + "minKey", + MinKey.instance(), + "maxKey", + MaxKey.instance())); docRef.update( map( @@ -123,6 +137,7 @@ public void writeAndReadBsonTypeOffline() throws ExecutionException, Interrupted expected.put("bsonTimestamp", new BsonTimestamp(1, 3)); expected.put("bsonBinary", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})); expected.put("int32", new Int32Value(1)); + expected.put("decimal128", new Decimal128Value("1.2e3")); expected.put("minKey", MinKey.instance()); expected.put("maxKey", MaxKey.instance()); @@ -133,6 +148,7 @@ public void writeAndReadBsonTypeOffline() throws 
ExecutionException, Interrupted assertTrue(actual.get("bsonTimestamp") instanceof BsonTimestamp); assertTrue(actual.get("bsonBinary") instanceof BsonBinaryData); assertTrue(actual.get("int32") instanceof Int32Value); + assertTrue(actual.get("decimal128") instanceof Decimal128Value); assertTrue(actual.get("minKey") instanceof MinKey); assertTrue(actual.get("maxKey") instanceof MaxKey); assertEquals(expected, actual.getData()); @@ -174,6 +190,8 @@ public void listenToDocumentsWithBsonTypes() throws Throwable { BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}), "int32", new Int32Value(1), + "decimal128", + new Decimal128Value("1.2e3"), "minKey", MinKey.instance(), "maxKey", @@ -192,6 +210,9 @@ public void listenToDocumentsWithBsonTypes() throws Throwable { assertEquals( docSnap.getBsonTimestamp("bsonTimestamp"), new BsonTimestamp(1, 2)); assertEquals(docSnap.getInt32Value("int32"), new Int32Value(1)); + assertEquals( + docSnap.getDecimal128Value("decimal128"), + new Decimal128Value("1.2e3")); assertEquals(docSnap.getMinKey("minKey"), MinKey.instance()); assertEquals(docSnap.getMaxKey("maxKey"), MaxKey.instance()); @@ -266,7 +287,6 @@ public void filterAndOrderBsonObjectIds() throws Exception { randomColl .orderBy("key", Direction.DESCENDING) .whereGreaterThan("key", new BsonObjectId("507f191e810c19729de860ea")); - assertSDKQueryResultsConsistentWithBackend( randomColl, orderedQuery, docs, Arrays.asList("c", "b")); @@ -274,7 +294,6 @@ public void filterAndOrderBsonObjectIds() throws Exception { randomColl .orderBy("key", Direction.DESCENDING) .whereNotEqualTo("key", new BsonObjectId("507f191e810c19729de860eb")); - assertSDKQueryResultsConsistentWithBackend( randomColl, orderedQuery, docs, Arrays.asList("c", "a")); } @@ -385,6 +404,77 @@ public void filterAndOrderInt32() throws Exception { randomColl, orderedQuery, docs, Arrays.asList("c", "a")); } + @Test + public void filterAndOrderDecimal128() throws Exception { + Map> docs = + map( + "a", + map("key", new Decimal128Value("-1.2e3")), + "b", + map("key", new Decimal128Value("0")), + "c", + map("key", new Decimal128Value("1.2e3")), + "d", + map("key", new Decimal128Value("NaN")), + "e", + map("key", new Decimal128Value("-Infinity")), + "f", + map("key", new Decimal128Value("Infinity"))); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereGreaterThan("key", new Decimal128Value("-1.2e3")); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("f", "c", "b")); + + orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereGreaterThan("key", new Decimal128Value("-1.2e-3")); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("f", "c", "b")); + + orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereNotEqualTo("key", new Decimal128Value("0.0")); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("f", "c", "a", "e", "d")); + + orderedQuery = randomColl.whereNotEqualTo("key", new Decimal128Value("NaN")); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("e", "a", "b", "c", "f")); + + orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereEqualTo("key", new Decimal128Value("1.2e3")); + assertSDKQueryResultsConsistentWithBackend(randomColl, orderedQuery, docs, Arrays.asList("c")); + + orderedQuery = + randomColl + 
.orderBy("key", Direction.DESCENDING) + .whereNotEqualTo("key", new Decimal128Value("1.2e3")); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("f", "b", "a", "e", "d")); + + // Note: server is sending NaN incorrectly, but the SDK NotInFilter.matches gracefully handles + // it and removes the incorrect doc "d". + orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereNotIn( + "key", + Arrays.asList( + new Decimal128Value("1.2e3"), + new Decimal128Value("Infinity"), + new Decimal128Value("NaN"))); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("b", "a", "e")); + } + @Test public void filterAndOrderMinKey() throws Exception { Map> docs = @@ -478,6 +568,102 @@ public void filterNullValueWithBsonTypes() throws Exception { randomColl, query, docs, Arrays.asList("a", "d", "e")); } + @Test + public void filterAndOrderNumericalValues() throws Exception { + Map> docs = + map( + "a", + map("key", new Decimal128Value("-1.2e3")), // -1200 + "b", + map("key", new Int32Value(0)), + "c", + map("key", new Decimal128Value("1")), + "d", + map("key", new Int32Value(1)), + "e", + map("key", 1L), + "f", + map("key", 1.0), + "g", + map("key", new Decimal128Value("1.2e-3")), // 0.0012 + "h", + map("key", new Int32Value(2)), + "i", + map("key", new Decimal128Value("NaN")), + "j", + map("key", new Decimal128Value("-Infinity")), + "k", + map("key", NaN), + "l", + map("key", POSITIVE_INFINITY)); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query orderedQuery = randomColl.orderBy("key", Direction.DESCENDING); + assertSDKQueryResultsConsistentWithBackend( + randomColl, + orderedQuery, + docs, + Arrays.asList( + "l", // Infinity + "h", // 2 + "f", // 1.0 + "e", // 1 + "d", // 1 + "c", // 1 + "g", // 0.0012 + "b", // 0 + "a", // -1200 + "j", // -Infinity + "k", // NaN + "i" // NaN + )); + + orderedQuery = + randomColl + .orderBy("key", Direction.DESCENDING) + .whereNotEqualTo("key", new Decimal128Value("1.0")); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("l", "h", "g", "b", "a", "j", "k", "i")); + + orderedQuery = randomColl.orderBy("key", Direction.DESCENDING).whereEqualTo("key", 1); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("f", "e", "d", "c")); + } + + @Test + public void decimal128ValuesWithNo2sComplementRepresentation() throws Exception { + // For decimal128 values with no 2's complement representation, it is considered not equal to + // a double with the same value, e.g, 1.1. 
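+ // For example (mirroring the assertions below and in filterAndOrderNumericalValues above):
+ // whereEqualTo("key", new Decimal128Value("1.1")) matches only the Decimal128Value document,
+ // and whereEqualTo("key", 1.1) matches only the double document, while exactly representable
+ // values such as 1, 1.0 and new Decimal128Value("1") all compare equal to one another.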
+ Map> docs = + map( + "a", + map("key", new Decimal128Value("-1.1e-3")), // -0.0011 + "b", + map("key", new Decimal128Value("1.1")), + "c", + map("key", 1.1), + "d", + map("key", 1.0), + "e", + map("key", new Decimal128Value("1.1e-3")) // 0.0011 + ); + CollectionReference randomColl = testCollectionWithDocsOnNightly(docs); + + Query orderedQuery = randomColl.whereEqualTo("key", new Decimal128Value("1.1")); + assertSDKQueryResultsConsistentWithBackend(randomColl, orderedQuery, docs, Arrays.asList("b")); + + orderedQuery = randomColl.whereNotEqualTo("key", new Decimal128Value("1.1")); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("a", "e", "d", "c")); + + orderedQuery = randomColl.whereEqualTo("key", 1.1); + assertSDKQueryResultsConsistentWithBackend(randomColl, orderedQuery, docs, Arrays.asList("c")); + + orderedQuery = randomColl.whereNotEqualTo("key", 1.1); + assertSDKQueryResultsConsistentWithBackend( + randomColl, orderedQuery, docs, Arrays.asList("a", "e", "d", "b")); + } + @Test public void orderBsonTypesTogether() throws Exception { Map> docs = @@ -512,6 +698,12 @@ public void orderBsonTypesTogether() throws Exception { map("key", new Int32Value(1)), "int32Value3", map("key", new Int32Value(0)), + "decimal128Value1", + map("key", new Decimal128Value("-1.2e3")), + "decimal128Value2", + map("key", new Decimal128Value("-0.0")), + "decimal128Value3", + map("key", new Decimal128Value("1.2e3")), "minKey1", map("key", MinKey.instance()), "minKey2", @@ -539,9 +731,15 @@ public void orderBsonTypesTogether() throws Exception { "bsonTimestamp1", "bsonTimestamp2", "bsonTimestamp3", + // Int32Value and Decimal128Value are sorted together + "decimal128Value3", "int32Value2", + // Int32Value of 0 equals to Decimal128Value of 0, and falls to document key as second + // order "int32Value3", + "decimal128Value2", "int32Value1", + "decimal128Value1", "minKey2", "minKey1"); diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/POJOTest.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/POJOTest.java index 52cc1d3892f..cd463f457a1 100644 --- a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/POJOTest.java +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/POJOTest.java @@ -54,6 +54,7 @@ public static final class POJO { BsonTimestamp bsonTimestamp; RegexValue regexValue; Int32Value int32Value; + Decimal128Value decimal128Value; MinKey minKey; MaxKey maxKey; @@ -74,6 +75,7 @@ public POJO(double number, String str, DocumentReference documentReference) { this.bsonTimestamp = new BsonTimestamp(1, 2); this.regexValue = new RegexValue("^foo", "i"); this.int32Value = new Int32Value(1); + this.decimal128Value = new Decimal128Value("1.2e3"); this.minKey = MinKey.instance(); this.maxKey = MaxKey.instance(); } @@ -174,6 +176,14 @@ public void setInt32Value(Int32Value int32Value) { this.int32Value = int32Value; } + public Decimal128Value getDecimal128Value() { + return decimal128Value; + } + + public void setDecimal128Value(Decimal128Value decimal128Value) { + this.decimal128Value = decimal128Value; + } + public MinKey getMinKey() { return minKey; } @@ -234,6 +244,9 @@ public boolean equals(Object o) { if (!int32Value.equals(pojo.int32Value)) { return false; } + if (!decimal128Value.equals(pojo.decimal128Value)) { + return false; + } if (!minKey.equals(pojo.minKey)) { return false; } @@ -262,6 +275,7 @@ public int hashCode() { result = 31 * result + 
bsonTimestamp.hashCode(); result = 31 * result + regexValue.hashCode(); result = 31 * result + int32Value.hashCode(); + result = 31 * result + decimal128Value.hashCode(); result = 31 * result + minKey.hashCode(); result = 31 * result + maxKey.hashCode(); return result; @@ -358,7 +372,6 @@ public void testDocumentIdAnnotation() { assertEquals(reference.getId(), readFromStore.getDocReferenceId()); } - // TODO(Mila/BSON): this test is flaky due to a bug in the backend. @Test public void testSetMerge() { CollectionReference collection = testCollectionOnNightly(); diff --git a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/TypeTest.java b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/TypeTest.java index 13ec0646f5a..5d618549f6c 100644 --- a/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/TypeTest.java +++ b/firebase-firestore/src/androidTest/java/com/google/firebase/firestore/TypeTest.java @@ -129,6 +129,26 @@ public void testCanReadAndWriteInt32Value() { verifySuccessfulWriteReadCycle(map("int32", new Int32Value(1)), testDocumentOnNightly()); } + @Test + public void testCanReadAndWriteDecimal128Value() { + Map decimal128Values = + map( + "decimalSciPositive", new Decimal128Value("1.2e3"), + "decimalSciNegative", new Decimal128Value("-1.2e3"), + "decimalSciNegativeExponent", new Decimal128Value("1.2e-3"), + "decimalSciNegativeValueAndExponent", new Decimal128Value("-1.2e-3"), + "decimalSciExplicitPositiveExponent", new Decimal128Value("1.2e+3"), + "decimalFloatPositive", new Decimal128Value("1.1"), + "decimalIntNegative", new Decimal128Value("-1"), + "decimalZeroNegative", new Decimal128Value("-0"), + "decimalZeroInt", new Decimal128Value("0"), + "decimalZeroFloat", new Decimal128Value("0.0"), + "decimalNaN", new Decimal128Value("NaN"), + "decimalInfinityPositive", new Decimal128Value("Infinity"), + "decimalInfinityNegative", new Decimal128Value("-Infinity")); + verifySuccessfulWriteReadCycle(decimal128Values, testDocumentOnNightly()); + } + @Test public void testCanReadAndWriteBsonTimestampValue() { verifySuccessfulWriteReadCycle( @@ -165,6 +185,7 @@ public void testCanReadAndWriteBsonTypesInLists() { new BsonTimestamp(1, 2), BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}), new Int32Value(1), + new Decimal128Value("1.2e3"), MinKey.instance(), MaxKey.instance()); @@ -175,13 +196,22 @@ public void testCanReadAndWriteBsonTypesInLists() { public void testCanReadAndWriteBsonTypesInMaps() { Map data = map( - "bsonObjectId", new BsonObjectId("507f191e810c19729de860ea"), - "regex", new RegexValue("^foo", "i"), - "bsonTimestamp", new BsonTimestamp(1, 2), - "bsonBinary", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}), - "int32", new Int32Value(1), - "minKey", MinKey.instance(), - "maxKey", MaxKey.instance()); + "bsonObjectId", + new BsonObjectId("507f191e810c19729de860ea"), + "regex", + new RegexValue("^foo", "i"), + "bsonTimestamp", + new BsonTimestamp(1, 2), + "bsonBinary", + BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}), + "int32", + new Int32Value(1), + "decimal128", + new Decimal128Value("1.2e3"), + "minKey", + MinKey.instance(), + "maxKey", + MaxKey.instance()); verifySuccessfulWriteReadCycle(map("BsonTypes", data), testDocumentOnNightly()); } @@ -202,6 +232,42 @@ public void invalidRegexGetsRejected() throws Exception { "Invalid regex option 'a'. 
Supported options are 'i', 'm', 's', 'u', and 'x'")); } + @Test + public void invalidDecimal128ValueGetsRejected() throws Exception { + Exception error = null; + try { + waitFor(testDocumentOnNightly().set(map("key", new Decimal128Value("")))); + } catch (Exception e) { + error = e; + } + assertNotNull(error); + assertTrue(error.getMessage().contains("Invalid number")); + + try { + waitFor(testDocumentOnNightly().set(map("key", new Decimal128Value("abc")))); + } catch (Exception e) { + error = e; + } + assertNotNull(error); + assertTrue(error.getMessage().contains("Invalid number")); + + try { + waitFor(testDocumentOnNightly().set(map("key", new Decimal128Value("1 23.45")))); + } catch (Exception e) { + error = e; + } + assertNotNull(error); + assertTrue(error.getMessage().contains("Invalid number")); + + try { + waitFor(testDocumentOnNightly().set(map("key", new Decimal128Value("1e1234567890")))); + } catch (Exception e) { + error = e; + } + assertNotNull(error); + assertTrue(error.getMessage().contains("Exponent too large")); + } + @Test public void invalidBsonObjectIdGetsRejected() throws Exception { Exception error = null; @@ -320,6 +386,8 @@ public void testCanUseTypedAccessors() { new RegexValue("^foo", "i"), "int32", new Int32Value(1), + "decimal128", + new Decimal128Value("1.2e3"), "bsonTimestamp", new BsonTimestamp(1, 2), "bsonObjectId", @@ -355,6 +423,7 @@ public void testCanUseTypedAccessors() { assertEquals(data.get("vector"), snapshot.getVectorValue("vector")); assertEquals(data.get("regex"), snapshot.getRegexValue("regex")); assertEquals(data.get("int32"), snapshot.getInt32Value("int32")); + assertEquals(data.get("decimal128"), snapshot.getDecimal128Value("decimal128")); assertEquals(data.get("bsonTimestamp"), snapshot.getBsonTimestamp("bsonTimestamp")); assertEquals(data.get("bsonObjectId"), snapshot.getBsonObjectId("bsonObjectId")); assertEquals(data.get("bsonBinary"), snapshot.getBsonBinaryData("bsonBinary")); @@ -384,6 +453,7 @@ public void testTypeAccessorsCanReturnNull() { assertNull(snapshot.getVectorValue("missing")); assertNull(snapshot.getRegexValue("missing")); assertNull(snapshot.getInt32Value("missing")); + assertNull(snapshot.getDecimal128Value("missing")); assertNull(snapshot.getBsonTimestamp("missing")); assertNull(snapshot.getBsonObjectId("missing")); assertNull(snapshot.getBsonBinaryData("missing")); @@ -410,6 +480,8 @@ public void snapshotListenerSortsDifferentTypesSameAsServer() throws Exception { map("value", Double.NaN), "int32", map("value", new Int32Value(1)), + "decimal128", + map("value", new Decimal128Value("1.2e3")), "double", map("value", 1.0), "int", @@ -453,6 +525,7 @@ public void snapshotListenerSortsDifferentTypesSameAsServer() throws Exception { "double", "int", "int32", + "decimal128", "timestamp", "bsonTimestamp", "string", diff --git a/firebase-firestore/src/main/java/com/google/cloud/datastore/core/number/NumberComparisonHelper.java b/firebase-firestore/src/main/java/com/google/cloud/datastore/core/number/NumberComparisonHelper.java index 6af2ea76995..acb2ba06da5 100644 --- a/firebase-firestore/src/main/java/com/google/cloud/datastore/core/number/NumberComparisonHelper.java +++ b/firebase-firestore/src/main/java/com/google/cloud/datastore/core/number/NumberComparisonHelper.java @@ -14,6 +14,8 @@ package com.google.cloud.datastore.core.number; +import com.google.firebase.firestore.Quadruple; + /** A utility class for comparing numbers. 
*/ public final class NumberComparisonHelper { @@ -95,5 +97,28 @@ public static int firestoreCompareDoubles(double leftDouble, double rightDouble) } } + /** + * Compares Quadruples with Firestore query semantics: NaN precedes all other numbers and equals + * itself, all zeroes are equal. + * + * @return a negative integer, zero, or a positive integer as the first argument is less than, + * equal to, or greater than the second. + */ + public static int firestoreCompareQuadruples(Quadruple left, Quadruple right) { + // For the purposes of comparison, Firestore considers -0 and +0 to be equal. + if ((left.isZero() && right.isZero())) { + return 0; + } + + // NaN sorts equal to itself and before any other number. + if (left.isNaN()) { + return right.isNaN() ? 0 : -1; + } else if (right.isNaN()) { + return 1; + } + + return left.compareTo(right); + } + private NumberComparisonHelper() {} } diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/Decimal128Value.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/Decimal128Value.java new file mode 100644 index 00000000000..173a917969f --- /dev/null +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/Decimal128Value.java @@ -0,0 +1,68 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.firebase.firestore; + +import androidx.annotation.NonNull; +import java.util.Objects; + +/** Represents a 128-bit decimal type in Firestore documents. */ +public final class Decimal128Value { + public final String stringValue; + final Quadruple value; + + public Decimal128Value(@NonNull String val) { + this.stringValue = val; + this.value = Quadruple.fromString(val); + } + + /** + * Returns true if this Decimal128Value is equal to the provided object. + * + * @param obj The object to compare against. + * @return Whether this Decimal128Value is equal to the provided object. + */ + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + Quadruple otherValue = ((Decimal128Value) obj).value; + + // Firestore considers +0 and -0 to be equal. + if (this.value.isZero() && otherValue.isZero()) { + return true; + } + return this.value.compareTo(otherValue) == 0; + } + + @Override + public int hashCode() { + // Since +0 and -0 are considered equal, they should have the same hash code. 
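+ // For example, new Decimal128Value("0"), new Decimal128Value("0.0") and new Decimal128Value("-0")
+ // are all equal to each other, so all three must produce the same hash code.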
+ if (this.value.isZero()) { + return Objects.hash(Quadruple.POSITIVE_ZERO); + } + return this.value.hashCode(); + } + + @Override + public String toString() { + return "Decimal128Value{value=" + this.stringValue + "}"; + } +} diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/DocumentSnapshot.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/DocumentSnapshot.java index 9fc60f674be..1e09299675c 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/DocumentSnapshot.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/DocumentSnapshot.java @@ -545,6 +545,18 @@ public Int32Value getInt32Value(@NonNull String field) { return (Int32Value) get(field); } + /** + * Returns the value of the field as a 128-bit decimal. + * + * @param field The path to the field. + * @throws RuntimeException if the value is not a Decimal128Value. + * @return The value of the field. + */ + @Nullable + public Decimal128Value getDecimal128Value(@NonNull String field) { + return (Decimal128Value) get(field); + } + /** * Returns the value of the field as a BsonObjectId. * diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/FirebaseFirestore.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/FirebaseFirestore.java index c1218829b8a..2b067e28d1f 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/FirebaseFirestore.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/FirebaseFirestore.java @@ -691,7 +691,6 @@ private Task clearPersistence(Executor executor) { }); return source.getTask(); } - ; /** * Attaches a listener for a snapshots-in-sync event. The snapshots-in-sync event indicates that diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/Quadruple.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/Quadruple.java new file mode 100644 index 00000000000..5ab0c13c1c9 --- /dev/null +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/Quadruple.java @@ -0,0 +1,309 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package com.google.firebase.firestore; + +import static com.google.firebase.firestore.QuadrupleBuilder.EXPONENT_OF_INFINITY; + +/** + * A 128-bit binary floating point number which supports comparisons and creation from long, double + * and string. + * + * @param negative the sign of the number. + * @param biasedExponent the unsigned and biased (by 0x7FFF_FFFF) binary exponent. + * @param mantHi the unsigned high 64 bits of the mantissa (leading 1 omitted). + * @param mantLo the unsigned low 64 bits of the mantissa. + * + * This class is for internal usage only and should not be exposed externally. 
+ * @hide + */ +public final class Quadruple implements Comparable { + public static final Quadruple POSITIVE_ZERO = new Quadruple(false, 0, 0, 0); + public static final Quadruple NEGATIVE_ZERO = new Quadruple(true, 0, 0, 0); + public static final Quadruple NaN = new Quadruple(false, (int) EXPONENT_OF_INFINITY, 1L << 63, 0); + public static final Quadruple NEGATIVE_INFINITY = + new Quadruple(true, (int) EXPONENT_OF_INFINITY, 0, 0); + public static final Quadruple POSITIVE_INFINITY = + new Quadruple(false, (int) EXPONENT_OF_INFINITY, 0, 0); + private static final Quadruple MIN_LONG = new Quadruple(true, bias(63), 0, 0); + private static final Quadruple POSITIVE_ONE = new Quadruple(false, bias(0), 0, 0); + private static final Quadruple NEGATIVE_ONE = new Quadruple(true, bias(0), 0, 0); + private final boolean negative; + private final int biasedExponent; + private final long mantHi; + private final long mantLo; + + /** + * Build a new quadruple from its raw representation - sign, biased exponent, 128-bit mantissa. + */ + public Quadruple(boolean negative, int biasedExponent, long mantHi, long mantLo) { + this.negative = negative; + this.biasedExponent = biasedExponent; + this.mantHi = mantHi; + this.mantLo = mantLo; + } + + /** Return the sign of this {@link Quadruple}. */ + public boolean negative() { + return negative; + } + + /** Return the unsigned-32-bit biased exponent of this {@link Quadruple}. */ + public int biasedExponent() { + return biasedExponent; + } + + /** Return the high-order unsigned-64-bits of the mantissa of this {@link Quadruple}. */ + public long mantHi() { + return mantHi; + } + + /** Return the low-order unsigned-64-bits of the mantissa of this {@link Quadruple}. */ + public long mantLo() { + return mantLo; + } + + /** Return the (unbiased) exponent of this {@link Quadruple}. */ + public int exponent() { + return biasedExponent - QuadrupleBuilder.EXPONENT_BIAS; + } + + /** Return true if this {@link Quadruple} is -0 or +0 */ + public boolean isZero() { + return biasedExponent == 0 && mantHi == 0 && mantLo == 0; + } + + /** Return true if this {@link Quadruple} is -infinity or +infinity */ + public boolean isInfinite() { + return biasedExponent == (int) EXPONENT_OF_INFINITY && mantHi == 0 && mantLo == 0; + } + + /** Return true if this {@link Quadruple} is a NaN. */ + public boolean isNaN() { + return biasedExponent == (int) EXPONENT_OF_INFINITY && !(mantHi == 0 && mantLo == 0); + } + + // equals (and hashCode) follow Double.equals: all NaNs are equal and -0 != 0 + @Override + public boolean equals(Object other) { + if (!(other instanceof Quadruple)) { + return false; + } + Quadruple otherQuadruple = (Quadruple) other; + if (isNaN()) { + return otherQuadruple.isNaN(); + } else { + return negative == otherQuadruple.negative + && biasedExponent == otherQuadruple.biasedExponent + && mantHi == otherQuadruple.mantHi + && mantLo == otherQuadruple.mantLo; + } + } + + @Override + public int hashCode() { + if (isNaN()) { + return HASH_NAN; + } else { + int hashCode = Boolean.hashCode(negative); + hashCode = hashCode * 31 + Integer.hashCode(biasedExponent); + hashCode = hashCode * 31 + Long.hashCode(mantHi); + hashCode = hashCode * 31 + Long.hashCode(mantLo); + return hashCode; + } + } + + private static final int HASH_NAN = 31 * 31 * Integer.hashCode((int) EXPONENT_OF_INFINITY); + + // Compare two quadruples, with -0 < 0, and all NaNs equal and larger than all numbers. + @Override + public int compareTo(Quadruple other) { + if (isNaN()) { + return other.isNaN() ? 
0 : 1; + } + if (other.isNaN()) { + return -1; + } + int lessThan; + int greaterThan; + if (negative) { + if (!other.negative) { + return -1; + } + lessThan = 1; + greaterThan = -1; + } else { + if (other.negative) { + return 1; + } + lessThan = -1; + greaterThan = 1; + } + int expCompare = Integer.compareUnsigned(biasedExponent, other.biasedExponent); + if (expCompare < 0) { + return lessThan; + } + if (expCompare > 0) { + return greaterThan; + } + int mantHiCompare = Long.compareUnsigned(mantHi, other.mantHi); + if (mantHiCompare < 0) { + return lessThan; + } + if (mantHiCompare > 0) { + return greaterThan; + } + int mantLoCompare = Long.compareUnsigned(mantLo, other.mantLo); + if (mantLoCompare < 0) { + return lessThan; + } + if (mantLoCompare > 0) { + return greaterThan; + } + return 0; + } + + public static Quadruple fromLong(long value) { + if (value == Long.MIN_VALUE) { + return MIN_LONG; + } + if (value == 0) { + return POSITIVE_ZERO; + } + if (value == 1) { + return POSITIVE_ONE; + } + if (value == -1) { + return NEGATIVE_ONE; + } + boolean negative = value < 0; + if (negative) { + value = -value; + } + // Left-justify with the leading 1 dropped - value=0 or 1 is handled separately above, so + // leadingZeros+1 <= 63. + int leadingZeros = Long.numberOfLeadingZeros(value); + return new Quadruple(negative, bias(63 - leadingZeros), value << (leadingZeros + 1), 0); + } + + public static Quadruple fromDouble(double value) { + if (Double.isNaN(value)) { + return NaN; + } + if (Double.isInfinite(value)) { + return value < 0 ? NEGATIVE_INFINITY : POSITIVE_INFINITY; + } + if (Double.compare(value, 0.0) == 0) { + return POSITIVE_ZERO; + } + if (Double.compare(value, -0.0) == 0) { + return NEGATIVE_ZERO; + } + long bits = Double.doubleToLongBits(value); + long mantHi = bits << 12; + long exponent = bits >>> 52 & 0x7ff; + if (exponent == 0) { + // subnormal - mantHi cannot be zero as that means value==+/-0 + int leadingZeros = Long.numberOfLeadingZeros(mantHi); + mantHi = leadingZeros < 63 ? mantHi << (leadingZeros + 1) : 0; + exponent = -leadingZeros; + } + return new Quadruple(value < 0, bias((int) (exponent - 1023)), mantHi, 0); + } + + /** + * Converts a decimal number to a {@link Quadruple}. The supported format (no whitespace allowed) + * is: + * + *
+ * <ul>
+ *   <li>NaN for Quadruple.NaN
+ *   <li>Infinity or +Infinity for Quadruple.POSITIVE_INFINITY
+ *   <li>-Infinity for Quadruple.NEGATIVE_INFINITY
+ *   <li>regular expression: [+-]?[0-9]*(.[0-9]*)?([eE][+-]?[0-9]+)? - the exponent cannot be more
+ *       than 9 digits, and the whole string cannot be empty
+ * </ul>
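+ *
+ * <p>For example, "1.2e3", "-1.2e-3", "+Infinity", "-0" and "NaN" are all accepted, while "",
+ * "abc" and "1 23.45" are rejected with "Invalid number" and "1e1234567890" with "Exponent too
+ * large" (see the NumberFormatExceptions thrown below).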
+ */ + public static Quadruple fromString(String s) { + if (s.equals("NaN")) { + return NaN; + } + if (s.equals("-Infinity")) { + return NEGATIVE_INFINITY; + } + if (s.equals("Infinity") || s.equals("+Infinity")) { + return POSITIVE_INFINITY; + } + char[] chars = s.toCharArray(); + byte[] digits = new byte[chars.length]; + int len = chars.length; + int i = 0; + int j = 0; + int exponent = 0; + boolean negative = false; + if (i < len) { + if (chars[i] == '-') { + negative = true; + i++; + } else if (chars[i] == '+') { + i++; + } + } + int firstDigit = i; + while (i < len && Character.isDigit(chars[i])) { + digits[j++] = (byte) (chars[i++] - '0'); + } + if (i < len && chars[i] == '.') { + int decimal = ++i; + while (i < len && Character.isDigit(chars[i])) { + digits[j++] = (byte) (chars[i++] - '0'); + } + exponent = decimal - i; + } + if (i < len && (chars[i] == 'e' || chars[i] == 'E')) { + int exponentValue = 0; + i++; + int exponentSign = 1; + if (i < len) { + if (chars[i] == '-') { + exponentSign = -1; + i++; + } else if (chars[i] == '+') { + i++; + } + } + int firstExponent = i; + while (i < len && Character.isDigit(chars[i])) { + exponentValue = exponentValue * 10 + chars[i++] - '0'; + if (i - firstExponent > 9) { + throw new NumberFormatException("Exponent too large " + s); + } + } + if (i == firstExponent) { + throw new NumberFormatException("Invalid number " + s); + } + exponent += exponentValue * exponentSign; + } + if (j == 0 || i != len) { + throw new NumberFormatException("Invalid number " + s); + } + byte[] digitsCopy = new byte[j]; + System.arraycopy(digits, 0, digitsCopy, 0, j); + QuadrupleBuilder parsed = QuadrupleBuilder.parseDecimal(digitsCopy, exponent); + return new Quadruple(negative, parsed.exponent, parsed.mantHi, parsed.mantLo); + } + + private static final int bias(int exponent) { + return exponent + QuadrupleBuilder.EXPONENT_BIAS; + } +} diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/QuadrupleBuilder.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/QuadrupleBuilder.java new file mode 100644 index 00000000000..1b8e4de0e40 --- /dev/null +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/QuadrupleBuilder.java @@ -0,0 +1,822 @@ +// Copyright 2025 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +/* + * Copyright 2021 M.Vokhmentsev + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.firebase.firestore; + +/** + * This class is for internal usage only and should not be exposed externally. + * @hide + */ +public class QuadrupleBuilder { + public static QuadrupleBuilder parseDecimal(byte[] digits, int exp10) { + QuadrupleBuilder q = new QuadrupleBuilder(); + q.parse(digits, exp10); + return q; + } + + // The fields containing the value of the instance + public int exponent; + public long mantHi; + public long mantLo; + // 2^192 = 6.277e57, so the 58-th digit after point may affect the result + static final int MAX_MANTISSA_LENGTH = 59; + // Max value of the decimal exponent, corresponds to EXPONENT_OF_MAX_VALUE + static final int MAX_EXP10 = 646456993; + // Min value of the decimal exponent, corresponds to EXPONENT_OF_MIN_NORMAL + static final int MIN_EXP10 = -646457032; + // (2^63) / 10 =~ 9.223372e17 + static final double TWO_POW_63_DIV_10 = 922337203685477580.0; + // Just for convenience: 0x8000_0000_0000_0000L + static final long HIGH_BIT = 0x8000000000000000L; + // Just for convenience: 0x8000_0000L, 2^31 + static final double POW_2_31 = 2147483648.0; + // Just for convenience: 0x0000_0000_FFFF_FFFFL + static final long LOWER_32_BITS = 0x00000000FFFFFFFFL; + // Just for convenience: 0xFFFF_FFFF_0000_0000L; + static final long HIGHER_32_BITS = 0xFFFFFFFF00000000L; + // Approximate value of log2(10) + static final double LOG2_10 = Math.log(10) / Math.log(2); + // Approximate value of log2(e) + static final double LOG2_E = 1 / Math.log(2.0); + // The value of the exponent (biased) corresponding to {@code 1.0 == 2^0}; equals to 2_147_483_647 + // ({@code 0x7FFF_FFFF}). + static final int EXPONENT_BIAS = 0x7FFF_FFFF; + // The value of the exponent (biased), corresponding to {@code Infinity}, {@code _Infinty}, and + // {@code NaN} + static final long EXPONENT_OF_INFINITY = 0xFFFFFFFFL; + // An array of positive powers of two, each value consists of 4 longs: decimal exponent and 3 x 64 + // bits of mantissa, divided by ten Used to find an arbitrary power of 2 (by powerOfTwo(long exp)) + private static final long[][] POS_POWERS_OF_2 = { // 0: 2^0 = 1 = 0.1e1 + { + 1, 0x1999_9999_9999_9999L, 0x9999_9999_9999_9999L, 0x9999_9999_9999_999aL + }, // 1: 2^(2^0) = 2^1 = 2 = 0.2e1 + {1, 0x3333_3333_3333_3333L, 0x3333_3333_3333_3333L, 0x3333_3333_3333_3334L}, // *** + // 2: 2^(2^1) = 2^2 = 4 = 0.4e1 + {1, 0x6666_6666_6666_6666L, 0x6666_6666_6666_6666L, 0x6666_6666_6666_6667L}, // *** + // 3: 2^(2^2) = 2^4 = 16 = 0.16e2 + {2, 0x28f5_c28f_5c28_f5c2L, 0x8f5c_28f5_c28f_5c28L, 0xf5c2_8f5c_28f5_c290L}, // *** + // 4: 2^(2^3) = 2^8 = 256 = 0.256e3 + {3, 0x4189_374b_c6a7_ef9dL, 0xb22d_0e56_0418_9374L, 0xbc6a_7ef9_db22_d0e6L}, // *** + // 5: 2^(2^4) = 2^16 = 65536 = 0.65536e5 + { + 5, 0xa7c5_ac47_1b47_8423L, 0x0fcf_80dc_3372_1d53L, 0xcddd_6e04_c059_2104L + }, // 6: 2^(2^5) = 2^32 = 4294967296 = 0.4294967296e10 + { + 10, 0x6df3_7f67_5ef6_eadfL, 0x5ab9_a207_2d44_268dL, 0x97df_837e_6748_956eL + }, // 7: 2^(2^6) = 2^64 = 18446744073709551616 = 0.18446744073709551616e20 + { + 20, 0x2f39_4219_2484_46baL, 0xa23d_2ec7_29af_3d61L, 0x0607_aa01_67dd_94cbL + }, // 8: 2^(2^7) = 2^128 = 340282366920938463463374607431768211456 = + // 0.340282366920938463463374607431768211456e39 + { + 39, 0x571c_bec5_54b6_0dbbL, 0xd5f6_4baf_0506_840dL, 0x451d_b70d_5904_029bL + }, // 9: 2^(2^8) = 2^256 = + // 1.1579208923731619542357098500868790785326998466564056403945758401E+77 = + // 0.11579208923731619542357098500868790785326998466564056403945758401e78 + {78, 0x1da4_8ce4_68e7_c702L, 
0x6520_247d_3556_476dL, 0x1469_caf6_db22_4cfaL}, // *** + // 10: 2^(2^9) = 2^512 = + // 1.3407807929942597099574024998205846127479365820592393377723561444E+154 = + // 0.13407807929942597099574024998205846127479365820592393377723561444e155 + { + 155, 0x2252_f0e5_b397_69dcL, 0x9ae2_eea3_0ca3_ade0L, 0xeeaa_3c08_dfe8_4e30L + }, // 11: 2^(2^10) = 2^1024 = + // 1.7976931348623159077293051907890247336179769789423065727343008116E+308 = + // 0.17976931348623159077293051907890247336179769789423065727343008116e309 + { + 309, 0x2e05_5c9a_3f6b_a793L, 0x1658_3a81_6eb6_0a59L, 0x22c4_b082_6cf1_ebf7L + }, // 12: 2^(2^11) = 2^2048 = + // 3.2317006071311007300714876688669951960444102669715484032130345428E+616 = + // 0.32317006071311007300714876688669951960444102669715484032130345428e617 + { + 617, 0x52bb_45e9_cf23_f17fL, 0x7688_c076_06e5_0364L, 0xb344_79aa_9d44_9a57L + }, // 13: 2^(2^12) = 2^4096 = + // 1.0443888814131525066917527107166243825799642490473837803842334833E+1233 = + // 0.10443888814131525066917527107166243825799642490473837803842334833e1234 + { + 1234, 0x1abc_81c8_ff5f_846cL, 0x8f5e_3c98_53e3_8c97L, 0x4506_0097_f3bf_9296L + }, // 14: 2^(2^13) = 2^8192 = + // 1.0907481356194159294629842447337828624482641619962326924318327862E+2466 = + // 0.10907481356194159294629842447337828624482641619962326924318327862e2467 + { + 2467, 0x1bec_53b5_10da_a7b4L, 0x4836_9ed7_7dbb_0eb1L, 0x3b05_587b_2187_b41eL + }, // 15: 2^(2^14) = 2^16384 = + // 1.1897314953572317650857593266280071307634446870965102374726748212E+4932 = + // 0.11897314953572317650857593266280071307634446870965102374726748212e4933 + { + 4933, 0x1e75_063a_5ba9_1326L, 0x8abf_b8e4_6001_6ae3L, 0x2800_8702_d29e_8a3cL + }, // 16: 2^(2^15) = 2^32768 = + // 1.4154610310449547890015530277449516013481307114723881672343857483E+9864 = + // 0.14154610310449547890015530277449516013481307114723881672343857483e9865 + { + 9865, 0x243c_5d8b_b5c5_fa55L, 0x40c6_d248_c588_1915L, 0x4c0f_d99f_d5be_fc22L + }, // 17: 2^(2^16) = 2^65536 = + // 2.0035299304068464649790723515602557504478254755697514192650169737E+19728 = + // 0.20035299304068464649790723515602557504478254755697514192650169737e19729 + { + 19729, 0x334a_5570_c3f4_ef3cL, 0xa13c_36c4_3f97_9c90L, 0xda7a_c473_555f_b7a8L + }, // 18: 2^(2^17) = 2^131072 = + // 4.0141321820360630391660606060388767343771510270414189955825538065E+39456 = + // 0.40141321820360630391660606060388767343771510270414189955825538065e39457 + { + 39457, 0x66c3_0444_5dd9_8f3bL, 0xa8c2_93a2_0e47_a41bL, 0x4c5b_03dc_1260_4964L + }, // 19: 2^(2^18) = 2^262144 = + // 1.6113257174857604736195721184520050106440238745496695174763712505E+78913 = + // 0.16113257174857604736195721184520050106440238745496695174763712505e78914 + { + 78914, 0x293f_fbf5_fb02_8cc4L, 0x89d3_e5ff_4423_8406L, 0x369a_339e_1bfe_8c9bL + }, // 20: 2^(2^19) = 2^524288 = + // 2.5963705678310007761265964957268828277447343763484560463573654868E+157826 = + // 0.25963705678310007761265964957268828277447343763484560463573654868e157827 + { + 157827, 0x4277_92fb_b68e_5d20L, 0x7b29_7cd9_fc15_4b62L, 0xf091_4211_4aa9_a20cL + }, // 21: 2^(2^20) = 2^1048576 = + // 6.7411401254990734022690651047042454376201859485326882846944915676E+315652 = + // 0.67411401254990734022690651047042454376201859485326882846944915676e315653 + { + 315653, 0xac92_bc65_ad5c_08fcL, 0x00be_eb11_5a56_6c19L, 0x4ba8_82d8_a462_2437L + }, // 22: 2^(2^21) = 2^2097152 = + // 4.5442970191613663099961595907970650433180103994591456270882095573E+631305 = + // 0.45442970191613663099961595907970650433180103994591456270882095573e631306 + { + 
631306, 0x7455_8144_0f92_e80eL, 0x4da8_22cf_7f89_6f41L, 0x509d_5986_7816_4ecdL + }, // 23: 2^(2^22) = 2^4194304 = + // 2.0650635398358879243991194945816501695274360493029670347841664177E+1262611 = + // 0.20650635398358879243991194945816501695274360493029670347841664177e1262612 + { + 1262612, 0x34dd_99b4_c695_23a5L, 0x64bc_2e8f_0d8b_1044L, 0xb03b_1c96_da5d_d349L + }, // 24: 2^(2^23) = 2^8388608 = + // 4.2644874235595278724327289260856157547554200794957122157246170406E+2525222 = + // 0.42644874235595278724327289260856157547554200794957122157246170406e2525223 + { + 2525223, 0x6d2b_bea9_d6d2_5a08L, 0xa0a4_606a_88e9_6b70L, 0x1820_63bb_c2fe_8520L + }, // 25: 2^(2^24) = 2^16777216 = + // 1.8185852985697380078927713277749906189248596809789408311078112486E+5050445 = + // 0.18185852985697380078927713277749906189248596809789408311078112486e5050446 + { + 5050446, 0x2e8e_47d6_3bfd_d6e3L, 0x2b55_fa89_76ea_a3e9L, 0x1a6b_9d30_8641_2a73L + }, // 26: 2^(2^25) = 2^33554432 = + // 3.3072524881739831340558051919726975471129152081195558970611353362E+10100890 = + // 0.33072524881739831340558051919726975471129152081195558970611353362e10100891 + { + 10100891, 0x54aa_68ef_a1d7_19dfL, 0xd850_5806_612c_5c8fL, 0xad06_8837_fee8_b43aL + }, // 27: 2^(2^26) = 2^67108864 = + // 1.0937919020533002449982468634925923461910249420785622990340704603E+20201781 = + // 0.10937919020533002449982468634925923461910249420785622990340704603e20201782 + { + 20201782, 0x1c00_464c_cb7b_ae77L, 0x9e38_7778_4c77_982cL, 0xd94a_f3b6_1717_404fL + }, // 28: 2^(2^27) = 2^134217728 = + // 1.1963807249973763567102377630870670302911237824129274789063323723E+40403562 = + // 0.11963807249973763567102377630870670302911237824129274789063323723e40403563 + { + 40403563, 0x1ea0_99c8_be2b_6cd0L, 0x8bfb_6d53_9fa5_0466L, 0x6d3b_c37e_69a8_4218L + }, // 29: 2^(2^28) = 2^268435456 = + // 1.4313268391452478724777126233530788980596273340675193575004129517E+80807124 = + // 0.14313268391452478724777126233530788980596273340675193575004129517e80807125 + { + 80807125, 0x24a4_57f4_66ce_8d18L, 0xf2c8_f3b8_1bc6_bb59L, 0xa78c_7576_92e0_2d49L + }, // 30: 2^(2^29) = 2^536870912 = + // 2.0486965204575262773910959587280218683219330308711312100181276813E+161614248 = + // 0.20486965204575262773910959587280218683219330308711312100181276813e161614249 + { + 161614249, 0x3472_5667_7aba_6b53L, 0x3fbf_90d3_0611_a67cL, 0x1e03_9d87_e0bd_b32bL + }, // 31: 2^(2^30) = 2^1073741824 = + // 4.1971574329347753848087162337676781412761959309467052555732924370E+323228496 = + // 0.41971574329347753848087162337676781412761959309467052555732924370e323228497 + { + 323228497, 0x6b72_7daf_0fd3_432aL, 0x71f7_1121_f9e4_200fL, 0x8fcd_9942_d486_c10cL + }, // 32: 2^(2^31) = 2^2147483648 = + // 1.7616130516839633532074931497918402856671115581881347960233679023E+646456993 = + // 0.17616130516839633532074931497918402856671115581881347960233679023e646456994 + {646456994, 0x2d18_e844_84d9_1f78L, 0x4079_bfe7_829d_ec6fL, 0x2155_1643_e365_abc6L} + }; + // An array of negative powers of two, each value consists of 4 longs: decimal exponent and 3 x 64 + // bits of mantissa, divided by ten. 
Used to find an arbitrary power of 2 (by powerOfTwo(long + // exp)) + private static final long[][] NEG_POWERS_OF_2 = { // v18 + // 0: 2^0 = 1 = 0.1e1 + { + 1, 0x1999_9999_9999_9999L, 0x9999_9999_9999_9999L, 0x9999_9999_9999_999aL + }, // 1: 2^-(2^0) = 2^-1 = 0.5 = 0.5e0 + { + 0, 0x8000_0000_0000_0000L, 0x0000_0000_0000_0000L, 0x0000_0000_0000_0000L + }, // 2: 2^-(2^1) = 2^-2 = 0.25 = 0.25e0 + // {0, 0x4000_0000_0000_0000L, 0x0000_0000_0000_0000L, 0x0000_0000_0000_0000L}, + {0, 0x4000_0000_0000_0000L, 0x0000_0000_0000_0000L, 0x0000_0000_0000_0001L}, // *** + // 3: 2^-(2^2) = 2^-4 = 0.0625 = 0.625e-1 + { + -1, 0xa000_0000_0000_0000L, 0x0000_0000_0000_0000L, 0x0000_0000_0000_0000L + }, // 4: 2^-(2^3) = 2^-8 = 0.00390625 = 0.390625e-2 + { + -2, 0x6400_0000_0000_0000L, 0x0000_0000_0000_0000L, 0x0000_0000_0000_0000L + }, // 5: 2^-(2^4) = 2^-16 = 0.0000152587890625 = 0.152587890625e-4 + {-4, 0x2710_0000_0000_0000L, 0x0000_0000_0000_0000L, 0x0000_0000_0000_0001L}, // *** + // 6: 2^-(2^5) = 2^-32 = 2.3283064365386962890625E-10 = 0.23283064365386962890625e-9 + {-9, 0x3b9a_ca00_0000_0000L, 0x0000_0000_0000_0000L, 0x0000_0000_0000_0001L}, // *** + // 7: 2^-(2^6) = 2^-64 = 5.42101086242752217003726400434970855712890625E-20 = + // 0.542101086242752217003726400434970855712890625e-19 + { + -19, 0x8ac7_2304_89e8_0000L, 0x0000_0000_0000_0000L, 0x0000_0000_0000_0000L + }, // 8: 2^-(2^7) = 2^-128 = + // 2.9387358770557187699218413430556141945466638919302188037718792657E-39 = + // 0.29387358770557187699218413430556141945466638919302188037718792657e-38 + {-38, 0x4b3b_4ca8_5a86_c47aL, 0x098a_2240_0000_0000L, 0x0000_0000_0000_0001L}, // *** + // 9: 2^-(2^8) = 2^-256 = + // 8.6361685550944446253863518628003995711160003644362813850237034700E-78 = + // 0.86361685550944446253863518628003995711160003644362813850237034700e-77 + { + -77, 0xdd15_fe86_affa_d912L, 0x49ef_0eb7_13f3_9ebeL, 0xaa98_7b6e_6fd2_a002L + }, // 10: 2^-(2^9) = 2^-512 = + // 7.4583407312002067432909653154629338373764715346004068942715183331E-155 = + // 0.74583407312002067432909653154629338373764715346004068942715183331e-154 + { + -154, 0xbeee_fb58_4aff_8603L, 0xaafb_550f_facf_d8faL, 0x5ca4_7e4f_88d4_5371L + }, // 11: 2^-(2^10) = 2^-1024 = + // 5.5626846462680034577255817933310101605480399511558295763833185421E-309 = + // 0.55626846462680034577255817933310101605480399511558295763833185421e-308 + {-308, 0x8e67_9c2f_5e44_ff8fL, 0x570f_09ea_a7ea_7648L, 0x5961_db50_c6d2_b888L}, // *** + // 12: 2^-(2^11) = 2^-2048 = + // 3.0943460473825782754801833699711978538925563038849690459540984582E-617 = + // 0.30943460473825782754801833699711978538925563038849690459540984582e-616 + { + -616, 0x4f37_1b33_99fc_2ab0L, 0x8170_041c_9feb_05aaL, 0xc7c3_4344_7c75_bcf6L + }, // 13: 2^-(2^12) = 2^-4096 = + // 9.5749774609521853579467310122804202420597417413514981491308464986E-1234 = + // 0.95749774609521853579467310122804202420597417413514981491308464986e-1233 + { + -1233, 0xf51e_9281_7901_3fd3L, 0xde4b_d12c_de4d_985cL, 0x4a57_3ca6_f94b_ff14L + }, // 14: 2^-(2^13) = 2^-8192 = + // 9.1680193377742358281070619602424158297818248567928361864131947526E-2467 = + // 0.91680193377742358281070619602424158297818248567928361864131947526e-2466 + { + -2466, 0xeab3_8812_7bcc_aff7L, 0x1667_6391_42b9_fbaeL, 0x775e_c999_5e10_39fbL + }, // 15: 2^-(2^14) = 2^-16384 = + // 8.4052578577802337656566945433043815064951983621161781002720680748E-4933 = + // 0.84052578577802337656566945433043815064951983621161781002720680748e-4932 + { + -4932, 0xd72c_b2a9_5c7e_f6ccL, 0xe81b_f1e8_25ba_7515L, 
0xc2fe_b521_d6cb_5dcdL + }, // 16: 2^-(2^15) = 2^-32768 = + // 7.0648359655776364427774021878587184537374439102725065590941425796E-9865 = + // 0.70648359655776364427774021878587184537374439102725065590941425796e-9864 + {-9864, 0xb4dc_1be6_6045_02dcL, 0xd491_079b_8eef_6535L, 0x578d_3965_d24d_e84dL}, // *** + // 17: 2^-(2^16) = 2^-65536 = + // 4.9911907220519294656590574792132451973746770423207674161425040336E-19729 = + // 0.49911907220519294656590574792132451973746770423207674161425040336e-19728 + {-19728, 0x7fc6_447b_ee60_ea43L, 0x2548_da5c_8b12_5b27L, 0x5f42_d114_2f41_d349L}, // *** + // 18: 2^-(2^17) = 2^-131072 = + // 2.4911984823897261018394507280431349807329035271689521242878455599E-39457 = + // 0.24911984823897261018394507280431349807329035271689521242878455599e-39456 + {-39456, 0x3fc6_5180_f88a_f8fbL, 0x6a69_15f3_8334_9413L, 0x063c_3708_b6ce_b291L}, // *** + // 19: 2^-(2^18) = 2^-262144 = + // 6.2060698786608744707483205572846793091942192651991171731773832448E-78914 = + // 0.62060698786608744707483205572846793091942192651991171731773832448e-78913 + { + -78913, 0x9ee0_197c_8dcd_55bfL, 0x2b2b_9b94_2c38_f4a2L, 0x0f8b_a634_e9c7_06aeL + }, // 20: 2^-(2^19) = 2^-524288 = + // 3.8515303338821801176537443725392116267291403078581314096728076497E-157827 = + // 0.38515303338821801176537443725392116267291403078581314096728076497e-157826 + {-157826, 0x6299_63a2_5b8b_2d79L, 0xd00b_9d22_86f7_0876L, 0xe970_0470_0c36_44fcL}, // *** + // 21: 2^-(2^20) = 2^-1048576 = + // 1.4834285912814577854404052243709225888043963245995136935174170977E-315653 = + // 0.14834285912814577854404052243709225888043963245995136935174170977e-315652 + { + -315652, 0x25f9_cc30_8cee_f4f3L, 0x40f1_9543_911a_4546L, 0xa2cd_3894_52cf_c366L + }, // 22: 2^-(2^21) = 2^-2097152 = + // 2.2005603854312903332428997579002102976620485709683755186430397089E-631306 = + // 0.22005603854312903332428997579002102976620485709683755186430397089e-631305 + { + -631305, 0x3855_97b0_d47e_76b8L, 0x1b9f_67e1_03bf_2329L, 0xc311_9848_5959_85f7L + }, // 23: 2^-(2^22) = 2^-4194304 = + // 4.8424660099295090687215589310713586524081268589231053824420510106E-1262612 = + // 0.48424660099295090687215589310713586524081268589231053824420510106e-1262611 + {-1262611, 0x7bf7_95d2_76c1_2f66L, 0x66a6_1d62_a446_659aL, 0xa1a4_d73b_ebf0_93d5L}, // *** + // 24: 2^-(2^23) = 2^-8388608 = + // 2.3449477057322620222546775527242476219043877555386221929831430440E-2525223 = + // 0.23449477057322620222546775527242476219043877555386221929831430440e-2525222 + {-2525222, 0x3c07_d96a_b1ed_7799L, 0xcb73_55c2_2cc0_5ac0L, 0x4ffc_0ab7_3b1f_6a49L}, // *** + // 25: 2^-(2^24) = 2^-16777216 = + // 5.4987797426189993226257377747879918011694025935111951649826798628E-5050446 = + // 0.54987797426189993226257377747879918011694025935111951649826798628e-5050445 + {-5050445, 0x8cc4_cd8c_3ede_fb9aL, 0x6c8f_f86a_90a9_7e0cL, 0x166c_fddb_f98b_71bfL}, // *** + // 26: 2^-(2^25) = 2^-33554432 = + // 3.0236578657837068435515418409027857523343464783010706819696074665E-10100891 = + // 0.30236578657837068435515418409027857523343464783010706819696074665e-10100890 + {-10100890, 0x4d67_d81c_c88e_1228L, 0x1d7c_fb06_666b_79b3L, 0x7b91_6728_aaa4_e70dL}, // *** + // 27: 2^-(2^26) = 2^-67108864 = + // 9.1425068893156809483320844568740945600482370635012633596231964471E-20201782 = + // 0.91425068893156809483320844568740945600482370635012633596231964471e-20201781 + {-20201781, 0xea0c_5549_4e7a_552dL, 0xb88c_b948_4bb8_6c61L, 0x8d44_893c_610b_b7dFL}, // *** + // 28: 2^-(2^27) = 2^-134217728 = + // 
8.3585432221184688810803924874542310018191301711943564624682743545E-40403563 = + // 0.83585432221184688810803924874542310018191301711943564624682743545e-40403562 + { + -40403562, 0xd5fa_8c82_1ec0_c24aL, 0xa80e_46e7_64e0_f8b0L, 0xa727_6bfa_432f_ac7eL + }, // 29: 2^-(2^28) = 2^-268435456 = + // 6.9865244796022595809958912202005005328020601847785697028605460277E-80807125 = + // 0.69865244796022595809958912202005005328020601847785697028605460277e-80807124 + { + -80807124, 0xb2da_e307_426f_6791L, 0xc970_b82f_58b1_2918L, 0x0472_592f_7f39_190eL + }, // 30: 2^-(2^29) = 2^-536870912 = + // 4.8811524304081624052042871019605298977947353140996212667810837790E-161614249 = + // 0.48811524304081624052042871019605298977947353140996212667810837790e-161614248 + // {-161614248, 0x7cf5_1edd_8a15_f1c9L, 0x656d_ab34_98f8_e697L, 0x12da_a2a8_0e53_c809L}, + { + -161614248, 0x7cf5_1edd_8a15_f1c9L, 0x656d_ab34_98f8_e697L, 0x12da_a2a8_0e53_c807L + }, // 31: 2^-(2^30) = 2^-1073741824 = + // 2.3825649048879510732161697817326745204151961255592397879550237608E-323228497 = + // 0.23825649048879510732161697817326745204151961255592397879550237608e-323228496 + { + -323228496, 0x3cfe_609a_b588_3c50L, 0xbec8_b5d2_2b19_8871L, 0xe184_7770_3b46_22b4L + }, // 32: 2^-(2^31) = 2^-2147483648 = + // 5.6766155260037313438164181629489689531186932477276639365773003794E-646456994 = + // 0.56766155260037313438164181629489689531186932477276639365773003794e-646456993 + {-646456993, 0x9152_447b_9d7c_da9aL, 0x3b4d_3f61_10d7_7aadL, 0xfa81_bad1_c394_adb4L} + }; + // Buffers used internally + // The order of words in the arrays is big-endian: the highest part is in buff[0] (in buff[1] for + // buffers of 10 words) + + private final long[] buffer4x64B = new long[4]; + private final long[] buffer6x32A = new long[6]; + private final long[] buffer6x32B = new long[6]; + private final long[] buffer6x32C = new long[6]; + private final long[] buffer12x32 = new long[12]; + + private void parse(byte[] digits, int exp10) { + exp10 += (digits).length - 1; // digits is viewed as x.yyy below. + this.exponent = 0; + this.mantHi = 0L; + this.mantLo = 0L; + // Finds numeric value of the decimal mantissa + long[] mantissa = this.buffer6x32C; + int exp10Corr = parseMantissa(digits, mantissa); + if (exp10Corr == 0 && isEmpty(mantissa)) { + // Mantissa == 0 + return; + } + // takes account of the point position in the mant string and possible carry as a result of + // round-up (like 9.99e1 -> 1.0e2) + exp10 += exp10Corr; + if (exp10 < MIN_EXP10) { + return; + } + if (exp10 > MAX_EXP10) { + this.exponent = ((int) (long) (EXPONENT_OF_INFINITY)); + return; + } + double exp2 = findBinaryExponent(exp10, mantissa); + // Finds binary mantissa and possible exponent correction. Fills the fields. 
+ findBinaryMantissa(exp10, exp2, mantissa); + } + + private int parseMantissa(byte[] digits, long[] mantissa) { + for (int i = (0); i < (6); i++) { + mantissa[i] = 0L; + } + // Skip leading zeroes + int firstDigit = 0; + while (firstDigit < (digits).length && digits[firstDigit] == 0) { + firstDigit += 1; + } + if (firstDigit == (digits).length) { + return 0; // All zeroes + } + int expCorr = -firstDigit; + // Limit the string length to avoid unnecessary fuss + if ((digits).length - firstDigit > MAX_MANTISSA_LENGTH) { + boolean carry = digits[MAX_MANTISSA_LENGTH] >= 5; // The highest digit to be truncated + byte[] truncated = new byte[MAX_MANTISSA_LENGTH]; + ; + for (int i = (0); i < (MAX_MANTISSA_LENGTH); i++) { + truncated[i] = digits[i + firstDigit]; + } + if (carry) { // Round-up: add carry + expCorr += addCarry(truncated); // May add an extra digit in front of it (99..99 -> 100) + } + digits = truncated; + firstDigit = 0; + } + for (int i = ((digits).length) - 1; i >= (firstDigit); i--) { // digits, starting from the last + mantissa[0] |= ((long) (digits[i])) << 32L; + divBuffBy10(mantissa); + } + return expCorr; + } + + // Divides the unpacked value stored in the given buffer by 10 + // @param buffer contains the unpacked value to divide (32 least significant bits are used) + private void divBuffBy10(long[] buffer) { + int maxIdx = (buffer).length; + // big/endian + for (int i = (0); i < (maxIdx); i++) { + long r = buffer[i] % 10L; + buffer[i] = ((buffer[i]) / (10L)); + if (i + 1 < maxIdx) { + buffer[i + 1] += r << 32L; + } + } + } + + // Checks if the buffer is empty (contains nothing but zeros) + // @param buffer the buffer to check + // @return {@code true} if the buffer is empty, {@code false} otherwise + private boolean isEmpty(long[] buffer) { + for (int i = (0); i < ((buffer).length); i++) { + if (buffer[i] != 0L) { + return false; + } + } + return true; + } + + // Adds one to a decimal number represented as a sequence of decimal digits. propagates carry as + // needed, so that {@code addCarryTo("6789") = "6790", addCarryTo("9999") = "10000"} etc. + // @return 1 if an additional higher "1" was added in front of the number as a result of + // rounding-up, 0 otherwise + private int addCarry(byte[] digits) { + for (int i = ((digits).length) - 1; i >= (0); i--) { // starting with the lowest digit + byte c = digits[i]; + if (c == 9) { + digits[i] = 0; + } else { + digits[i] = ((byte) (digits[i] + 1)); + return 0; + } + } + digits[0] = 1; + return 1; + } + + // Finds binary exponent, using decimal exponent and mantissa.
+ // exp2 = exp10 * log2(10) + log2(mant)
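+ // (e.g. for 1.2e3 = 1200: exp10 = 3 and mant =~ 1.2, so exp2 =~ 3 * 3.3219 + 0.263 = 10.23,
+ // and floor(10.23) = 10 is used as the binary exponent estimate, since 2^10 <= 1200 < 2^11.)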
+ // @param exp10 decimal exponent + // @param mantissa array of longs containing decimal mantissa (divided by 10) + // @return found value of binary exponent + private double findBinaryExponent(int exp10, long[] mantissa) { + long mant10 = + mantissa[0] << 31L | ((mantissa[1]) >>> (1L)); // Higher 63 bits of the mantissa, in range + // 0x0CC..CCC -- 0x7FF..FFF (2^63/10 -- 2^63-1) + // decimal value of the mantissa in range 1.0..9.9999... + double mant10d = ((double) (mant10)) / TWO_POW_63_DIV_10; + return ((long) Math.floor(((double) (exp10)) * LOG2_10 + log2(mant10d))); // Binary exponent + } + + // Calculates log2 of the given x + // @param x argument that can't be 0 + // @return the value of log2(x) + private double log2(double x) { + // x can't be 0 + return LOG2_E * Math.log(x); + } + + private void findBinaryMantissa(int exp10, double exp2, long[] mantissa) { + // pow(2, -exp2): division by 2^exp2 is multiplication by 2^(-exp2) actually + long[] powerOf2 = this.buffer4x64B; + powerOfTwo(-exp2, powerOf2); + long[] product = this.buffer12x32; // use it for the product (M * 10^E / 2^e) + multUnpacked6x32byPacked(mantissa, powerOf2, product); // product in buff_12x32 + multBuffBy10(product); // "Quasidecimals" are numbers divided by 10 + // The powerOf2[0] is stored as an unsigned value + if (((long) (powerOf2[0])) != ((long) (-exp10))) { + // For some combinations of exp2 and exp10, additional multiplication needed + // (see mant2_from_M_E_e.xls) + multBuffBy10(product); + } + // compensate possible inaccuracy of logarithms used to compute exp2 + exp2 += normalizeMant(product); + exp2 += EXPONENT_BIAS; // add bias + // For subnormal values, exp2 <= 0. We just return 0 for them, as they are + // far from any range we are interested in. + if (exp2 <= 0) { + return; + } + exp2 += roundUp(product); // round up, may require exponent correction + if (((long) (exp2)) >= EXPONENT_OF_INFINITY) { + this.exponent = ((int) (long) (EXPONENT_OF_INFINITY)); + } else { + this.exponent = ((int) (long) (exp2)); + this.mantHi = ((product[0] << 32L) + product[1]); + this.mantLo = ((product[2] << 32L) + product[3]); + } + } + + // Calculates the required power and returns the result in the quasidecimal format (an array of + // longs, where result[0] is the decimal exponent of the resulting value, and result[1] -- + // result[3] contain 192 bits of the mantissa divided by ten (so that 8 looks like + //
{@code {1, 0xCCCC_.._CCCCL, 0xCCCC_.._CCCCL, 0xCCCC_.._CCCDL}})
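+  // (0.8 in hexadecimal is the repeating fraction 0.CCCC...; the truncated 192-bit mantissa is
+  // rounded up, which is why the lowest word above ends in ...CCCD.)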
+ // uses arrays buffer4x64B, buffer6x32A, buffer6x32B, buffer12x32, + // @param exp the power to raise 2 to + // @param power (result) the value of {@code2^exp} + private void powerOfTwo(double exp, long[] power) { + if (exp == 0) { + array_copy(POS_POWERS_OF_2[0], power); + return; + } + // positive powers of 2 (2^0, 2^1, 2^2, 2^4, 2^8 ... 2^(2^31) ) + long[][] powers = (POS_POWERS_OF_2); + if (exp < 0) { + exp = -exp; + powers = (NEG_POWERS_OF_2); // positive powers of 2 (2^0, 2^-1, 2^-2, 2^-4, 2^-8 ... 2^30) + } + // 2^31 = 0x8000_0000L; a single bit that will be shifted right at every iteration + double currPowOf2 = POW_2_31; + int idx = 32; // Index in the table of powers + boolean first_power = true; + // if exp = b31 * 2^31 + b30 * 2^30 + .. + b0 * 2^0, where b0..b31 are the values of the bits in + // exp, then 2^exp = 2^b31 * 2^b30 ... * 2^b0. Find the product, using a table of powers of 2. + while (exp > 0) { + if (exp >= currPowOf2) { // the current bit in the exponent is 1 + if (first_power) { + // 4 longs, power[0] -- decimal (?) exponent, power[1..3] -- 192 bits of mantissa + array_copy((powers)[idx], power); + first_power = false; + } else { + // Multiply by the corresponding power of 2 + multPacked3x64_AndAdjustExponent(power, (powers)[idx], power); + } + exp -= currPowOf2; + } + idx -= 1; + currPowOf2 = currPowOf2 * 0.5; // Note: this is exact + } + } + + // Copies from into to. + private void array_copy(long[] source, long[] dest) { + for (int i = (0); i < ((dest).length); i++) { + dest[i] = source[i]; + } + } + + // Multiplies two quasidecimal numbers contained in buffers of 3 x 64 bits with exponents, puts + // the product to buffer4x64B
+ // and returns it. Both each of the buffers and the product contain 4 longs - exponent and 3 x 64 + // bits of mantissa. If the higher word of mantissa of the product is less than + // 0x1999_9999_9999_9999L (i.e. mantissa is less than 0.1) multiplies mantissa by 10 and adjusts + // the exponent respectively. + private void multPacked3x64_AndAdjustExponent(long[] factor1, long[] factor2, long[] result) { + multPacked3x64_simply(factor1, factor2, this.buffer12x32); + int expCorr = correctPossibleUnderflow(this.buffer12x32); + pack_6x32_to_3x64(this.buffer12x32, result); + // result[0] is a signed int64 value stored in an uint64 + result[0] = factor1[0] + factor2[0] + ((long) (expCorr)); // product.exp = f1.exp + f2.exp + } + + // Multiplies mantissas of two packed quasidecimal values (each is an array of 4 longs, exponent + + // 3 x 64 bits of mantissa) Returns the product as unpacked buffer of 12 x 32 (12 x 32 bits of + // product) + // uses arrays buffer6x32A, buffer6x32B + // @param factor1 an array of longs containing factor 1 as packed quasidecimal + // @param factor2 an array of longs containing factor 2 as packed quasidecimal + // @param result an array of 12 longs filled with the product of mantissas + private void multPacked3x64_simply(long[] factor1, long[] factor2, long[] result) { + for (int i = (0); i < ((result).length); i++) { + result[i] = 0L; + } + // TODO2 19.01.16 21:23:06 for the next version -- rebuild the table of powers to make the + // numbers unpacked, to avoid packing/unpacking + unpack_3x64_to_6x32(factor1, this.buffer6x32A); + unpack_3x64_to_6x32(factor2, this.buffer6x32B); + for (int i = (6) - 1; i >= (0); i--) { // compute partial 32-bit products + for (int j = (6) - 1; j >= (0); j--) { + long part = this.buffer6x32A[i] * this.buffer6x32B[j]; + result[j + i + 1] = (result[j + i + 1] + (part & LOWER_32_BITS)); + result[j + i] = (result[j + i] + ((part) >>> (32L))); + } + } + // Carry higher bits of the product to the lower bits of the next word + for (int i = (12) - 1; i >= (1); i--) { + result[i - 1] = (result[i - 1] + ((result[i]) >>> (32L))); + result[i] &= LOWER_32_BITS; + } + } + + // Corrects possible underflow of the decimal mantissa, passed in in the {@code mantissa}, by + // multiplying it by a power of ten. 
The corresponding value to adjust the decimal exponent is + // returned as the result + // @param mantissa a buffer containing the mantissa to be corrected + // @return a corrective (addition) that is needed to adjust the decimal exponent of the number + private int correctPossibleUnderflow(long[] mantissa) { + int expCorr = 0; + while (isLessThanOne(mantissa)) { // Underflow + multBuffBy10(mantissa); + expCorr -= 1; + } + return expCorr; + } + + // Checks if the unpacked quasidecimal value held in the given buffer is less than one (in this + // format, one is represented as { 0x1999_9999L, 0x9999_9999L, 0x9999_9999L,...} + // @param buffer a buffer containing the value to check + // @return {@code true}, if the value is less than one + private boolean isLessThanOne(long[] buffer) { + if (buffer[0] < 0x1999_9999L) { + return true; + } + if (buffer[0] > 0x1999_9999L) { + return false; + } + // A note regarding the coverage: + // Multiplying a 128-bit number by another 192-bit number, + // as well as multiplying of two 192-bit numbers, + // can never produce 320 (or 384 bits, respectively) of 0x1999_9999L, 0x9999_9999L, + for (int i = (1); i < ((buffer).length); i++) { + // so this loop can't be covered entirely + if (buffer[i] < 0x9999_9999L) { + return true; + } + if (buffer[i] > 0x9999_9999L) { + return false; + } + } + // and it can never reach this point in real life. + return false; // Still Java requires the return statement here. + } + + // Multiplies unpacked 192-bit value by a packed 192-bit factor
+ // uses static arrays buffer6x32B + // @param factor1 a buffer containing unpacked quasidecimal mantissa (6 x 32 bits) + // @param factor2 an array of 4 longs containing packed quasidecimal power of two + // @param product a buffer of at least 12 longs to hold the product + private void multUnpacked6x32byPacked(long[] factor1, long[] factor2, long[] product) { + for (int i = (0); i < ((product).length); i++) { + product[i] = 0L; + } + long[] unpacked2 = this.buffer6x32B; + unpack_3x64_to_6x32(factor2, unpacked2); // It's the powerOf2, with exponent in 0'th word + int maxFactIdx = (factor1).length; + for (int i = (maxFactIdx) - 1; i >= (0); i--) { // compute partial 32-bit products + for (int j = (maxFactIdx) - 1; j >= (0); j--) { + long part = factor1[i] * unpacked2[j]; + product[j + i + 1] = (product[j + i + 1] + (part & LOWER_32_BITS)); + product[j + i] = (product[j + i] + ((part) >>> (32L))); + } + } + // Carry higher bits of the product to the lower bits of the next word + for (int i = (12) - 1; i >= (1); i--) { + product[i - 1] = (product[i - 1] + ((product[i]) >>> (32L))); + product[i] &= LOWER_32_BITS; + } + } + + // Multiplies the unpacked value stored in the given buffer by 10 + // @param buffer contains the unpacked value to multiply (32 least significant bits are used) + private void multBuffBy10(long[] buffer) { + int maxIdx = (buffer).length - 1; + buffer[0] &= LOWER_32_BITS; + buffer[maxIdx] *= 10L; + for (int i = (maxIdx) - 1; i >= (0); i--) { + buffer[i] = (buffer[i] * 10L + ((buffer[i + 1]) >>> (32L))); + buffer[i + 1] &= LOWER_32_BITS; + } + } + + // Makes sure that the (unpacked) mantissa is normalized, + // i.e. buff[0] contains 1 in bit 32 (the implied integer part) and higher 32 of mantissa in bits + // 31..0, + // and buff[1]..buff[4] contain other 96 bits of mantissa in their lower halves: + //
0x0000_0001_XXXX_XXXXL, 0x0000_0000_XXXX_XXXXL...
+ // If necessary, divides the mantissa by appropriate power of 2 to make it normal. + // @param mantissa a buffer containing unpacked mantissa + // @return if the mantissa was not normal initially, a correction that should be added to the + // result's exponent, or 0 otherwise + private int normalizeMant(long[] mantissa) { + int expCorr = 31 - Long.numberOfLeadingZeros(mantissa[0]); + if (expCorr != 0) { + divBuffByPower2(mantissa, expCorr); + } + return expCorr; + } + + // Rounds up the contents of the unpacked buffer to 128 bits by adding unity one bit lower than + // the lowest of these 128 bits. If carry propagates up to bit 33 of buff[0], shifts the buffer + // rightwards to keep it normalized. + // @param mantissa the buffer to get rounded + // @return 1 if the buffer was shifted, 0 otherwise + private int roundUp(long[] mantissa) { + // due to the limited precision of the power of 2, a number with exactly half LSB in its + // mantissa + // (i.e that would have 0x8000_0000_0000_0000L in bits 128..191 if it were computed precisely), + // after multiplication by this power of 2, may get erroneous bits 185..191 (counting from the + // MSB), + // taking a value from + // 0xXXXX_XXXX_XXXX_XXXXL 0xXXXX_XXXX_XXXX_XXXXL 0x7FFF_FFFF_FFFF_FFD8L. + // to + // 0xXXXX_XXXX_XXXX_XXXXL 0xXXXX_XXXX_XXXX_XXXXL 0x8000_0000_0000_0014L, or something alike. + // To round it up, we first add + // 0x0000_0000_0000_0000L 0x0000_0000_0000_0000L 0x0000_0000_0000_0028L, to turn it into + // 0xXXXX_XXXX_XXXX_XXXXL 0xXXXX_XXXX_XXXX_XXXXL 0x8000_0000_0000_00XXL, + // and then add + // 0x0000_0000_0000_0000L 0x0000_0000_0000_0000L 0x8000_0000_0000_0000L, to provide carry to + // higher bits. + addToBuff(mantissa, 5, 100L); // to compensate possible inaccuracy + addToBuff(mantissa, 4, 0x8000_0000L); // round-up, if bits 128..159 >= 0x8000_0000L + if ((mantissa[0] & (HIGHER_32_BITS << 1L)) != 0L) { + // carry's got propagated beyond the highest bit + divBuffByPower2(mantissa, 1); + return 1; + } + return 0; + } + + // converts 192 most significant bits of the mantissa of a number from an unpacked quasidecimal + // form (where 32 least significant bits only used) to a packed quasidecimal form (where buff[0] + // contains the exponent and buff[1]..buff[3] contain 3 x 64 = 192 bits of mantissa) + // @param unpackedMant a buffer of at least 6 longs containing an unpacked value + // @param result a buffer of at least 4 long to hold the packed value + // @return packedQD192 with words 1..3 filled with the packed mantissa. packedQD192[0] is not + // affected. 
+ private void pack_6x32_to_3x64(long[] unpackedMant, long[] result) { + result[1] = (unpackedMant[0] << 32L) + unpackedMant[1]; + result[2] = (unpackedMant[2] << 32L) + unpackedMant[3]; + result[3] = (unpackedMant[4] << 32L) + unpackedMant[5]; + } + + // Unpacks the mantissa of a 192-bit quasidecimal (4 longs: exp10, mantHi, mantMid, mantLo) to a + // buffer of 6 longs, where the least significant 32 bits of each long contains respective 32 bits + // of the mantissa + // @param qd192 array of 4 longs containing the number to unpack + // @param buff_6x32 buffer of 6 long to hold the unpacked mantissa + private void unpack_3x64_to_6x32(long[] qd192, long[] buff_6x32) { + buff_6x32[0] = ((qd192[1]) >>> (32L)); + buff_6x32[1] = qd192[1] & LOWER_32_BITS; + buff_6x32[2] = ((qd192[2]) >>> (32L)); + buff_6x32[3] = qd192[2] & LOWER_32_BITS; + buff_6x32[4] = ((qd192[3]) >>> (32L)); + buff_6x32[5] = qd192[3] & LOWER_32_BITS; + } + + // Divides the contents of the buffer by 2^exp2
+ // (shifts the buffer rightwards by exp2 if the exp2 is positive, and leftwards if it's negative), + // keeping it unpacked (only lower 32 bits of each element are used, except the buff[0] whose + // higher half is intended to contain integer part) + // @param buffer the buffer to divide + // @param exp2 the exponent of the power of two to divide by, expected to be + private void divBuffByPower2(long[] buffer, int exp2) { + int maxIdx = (buffer).length - 1; + long backShift = ((long) (32 - Math.abs(exp2))); + if (exp2 > 0) { // Shift to the right + long exp2Shift = ((long) (exp2)); + for (int i = (maxIdx + 1) - 1; i >= (1); i--) { + buffer[i] = ((buffer[i]) >>> (exp2Shift)) | ((buffer[i - 1] << backShift) & LOWER_32_BITS); + } + buffer[0] = ((buffer[0]) >>> (exp2Shift)); // Preserve the high half of buff[0] + } else if (exp2 < 0) { // Shift to the left + long exp2Shift = ((long) (-exp2)); + buffer[0] = + ((buffer[0] << exp2Shift) + | ((buffer[1]) >>> (backShift))); // Preserve the high half of buff[0] + for (int i = (1); i < (maxIdx); i++) { + buffer[i] = + (((buffer[i] << exp2Shift) & LOWER_32_BITS) | ((buffer[i + 1]) >>> (backShift))); + } + buffer[maxIdx] = (buffer[maxIdx] << exp2Shift) & LOWER_32_BITS; + } + } + + // Adds the summand to the idx'th word of the unpacked value stored in the buffer + // and propagates carry as necessary + // @param buff the buffer to add the summand to + // @param idx the index of the element to which the summand is to be added + // @param summand the summand to add to the idx'th element of the buffer + private void addToBuff(long[] buff, int idx, long summand) { + int maxIdx = idx; + buff[maxIdx] = (buff[maxIdx] + summand); // Big-endian, the lowest word + for (int i = (maxIdx + 1) - 1; + i >= (1); + i--) { // from the lowest word upwards, except the highest + if ((buff[i] & HIGHER_32_BITS) != 0L) { + buff[i] &= LOWER_32_BITS; + buff[i - 1] += 1L; + } else { + break; + } + } + } +} diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataReader.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataReader.java index 347677bd563..e6170cb33d4 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataReader.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataReader.java @@ -459,7 +459,8 @@ private Value parseScalarValue(Object input, ParseContext context) { return parseRegexValue((RegexValue) input); } else if (input instanceof Int32Value) { return parseInteger32Value((Int32Value) input); - + } else if (input instanceof Decimal128Value) { + return parseDecimal128Value((Decimal128Value) input); } else if (input.getClass().isArray()) { throw context.createError("Arrays are not supported; use a List instead"); } else { @@ -550,6 +551,14 @@ private Value parseInteger32Value(Int32Value int32) { return Value.newBuilder().setMapValue(mapBuilder).build(); } + private Value parseDecimal128Value(Decimal128Value decimal128) { + MapValue.Builder mapBuilder = MapValue.newBuilder(); + mapBuilder.putFields( + Values.RESERVED_DECIMAL128_KEY, + Value.newBuilder().setStringValue(decimal128.stringValue).build()); + return Value.newBuilder().setMapValue(mapBuilder).build(); + } + private Value parseTimestamp(Timestamp timestamp) { // Firestore backend truncates precision down to microseconds. 
To ensure offline mode works // the same with regards to truncation, perform the truncation immediately without waiting for diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataWriter.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataWriter.java index 2ad4af8d2c0..08fb8292283 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataWriter.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/UserDataWriter.java @@ -87,7 +87,11 @@ public Object convertValue(Value value) { return value.getBooleanValue(); case TYPE_ORDER_NUMBER: if (value.getValueTypeCase() == MAP_VALUE) { - return convertInt32(value.getMapValue().getFieldsMap()); + if (Values.isInt32Value(value)) { + return convertInt32(value.getMapValue().getFieldsMap()); + } else if (Values.isDecimal128Value(value)) { + return convertDecimal128(value.getMapValue().getFieldsMap()); + } } return value.getValueTypeCase().equals(Value.ValueTypeCase.INTEGER_VALUE) ? (Object) value.getIntegerValue() // Cast to Object to prevent type coercion to double @@ -176,6 +180,10 @@ Int32Value convertInt32(Map mapValue) { return new Int32Value((int) mapValue.get(Values.RESERVED_INT32_KEY).getIntegerValue()); } + Decimal128Value convertDecimal128(Map mapValue) { + return new Decimal128Value(mapValue.get(Values.RESERVED_DECIMAL128_KEY).getStringValue()); + } + private Object convertServerTimestamp(Value serverTimestampValue) { switch (serverTimestampBehavior) { case PREVIOUS: diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/index/FirestoreIndexValueWriter.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/index/FirestoreIndexValueWriter.java index 87a0ef0221b..80ca51b9224 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/index/FirestoreIndexValueWriter.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/index/FirestoreIndexValueWriter.java @@ -14,6 +14,8 @@ package com.google.firebase.firestore.index; +import static com.google.firebase.firestore.model.Values.NULL_VALUE; + import com.google.firebase.firestore.model.ResourcePath; import com.google.firebase.firestore.model.Values; import com.google.firestore.v1.ArrayValue; @@ -80,17 +82,7 @@ private void writeIndexValueAux(Value indexValue, DirectionalIndexByteEncoder en encoder.writeLong(indexValue.getBooleanValue() ? 
1 : 0); break; case DOUBLE_VALUE: - double number = indexValue.getDoubleValue(); - if (Double.isNaN(number)) { - writeValueTypeLabel(encoder, INDEX_TYPE_NAN); - break; - } - writeValueTypeLabel(encoder, INDEX_TYPE_NUMBER); - if (number == -0.0) { - encoder.writeDouble(0.0); // -0.0, 0 and 0.0 are all considered the same - } else { - encoder.writeDouble(number); - } + writeIndexDouble(indexValue.getDoubleValue(), encoder); break; case INTEGER_VALUE: writeValueTypeLabel(encoder, INDEX_TYPE_NUMBER); @@ -98,10 +90,7 @@ private void writeIndexValueAux(Value indexValue, DirectionalIndexByteEncoder en encoder.writeDouble(indexValue.getIntegerValue()); break; case TIMESTAMP_VALUE: - Timestamp timestamp = indexValue.getTimestampValue(); - writeValueTypeLabel(encoder, INDEX_TYPE_TIMESTAMP); - encoder.writeLong(timestamp.getSeconds()); - encoder.writeLong(timestamp.getNanos()); + writeIndexTimestamp(indexValue.getTimestampValue(), encoder); break; case STRING_VALUE: writeIndexString(indexValue.getStringValue(), encoder); @@ -116,51 +105,54 @@ private void writeIndexValueAux(Value indexValue, DirectionalIndexByteEncoder en writeIndexEntityRef(indexValue.getReferenceValue(), encoder); break; case GEO_POINT_VALUE: - LatLng geoPoint = indexValue.getGeoPointValue(); - writeValueTypeLabel(encoder, INDEX_TYPE_GEOPOINT); - encoder.writeDouble(geoPoint.getLatitude()); - encoder.writeDouble(geoPoint.getLongitude()); + writeIndexGeoPoint(indexValue.getGeoPointValue(), encoder); break; case MAP_VALUE: Values.MapRepresentation mapType = Values.detectMapRepresentation(indexValue); - if (mapType.equals(Values.MapRepresentation.INTERNAL_MAX)) { - writeValueTypeLabel(encoder, Integer.MAX_VALUE); - break; - } else if (mapType.equals(Values.MapRepresentation.VECTOR)) { - writeIndexVector(indexValue.getMapValue(), encoder); - break; - } else if (mapType.equals(Values.MapRepresentation.REGEX)) { - writeIndexRegex(indexValue.getMapValue(), encoder); - break; - } else if (mapType.equals(Values.MapRepresentation.BSON_TIMESTAMP)) { - writeIndexBsonTimestamp(indexValue.getMapValue(), encoder); - break; - } else if (mapType.equals(Values.MapRepresentation.BSON_OBJECT_ID)) { - writeIndexBsonObjectId(indexValue.getMapValue(), encoder); - break; - } else if (mapType.equals(Values.MapRepresentation.BSON_BINARY)) { - writeIndexBsonBinaryData(indexValue.getMapValue(), encoder); - break; - } else if (mapType.equals(Values.MapRepresentation.INT32)) { - writeValueTypeLabel(encoder, INDEX_TYPE_NUMBER); - // Double and Int32 sort the same - encoder.writeDouble( - indexValue - .getMapValue() - .getFieldsMap() - .get(Values.RESERVED_INT32_KEY) - .getIntegerValue()); - break; - } else if (mapType.equals(Values.MapRepresentation.MIN_KEY)) { - writeValueTypeLabel(encoder, INDEX_TYPE_MIN_KEY); - break; - } else if (mapType.equals(Values.MapRepresentation.MAX_KEY)) { - writeValueTypeLabel(encoder, INDEX_TYPE_MAX_KEY); - break; + switch (mapType) { + case INTERNAL_MAX: + writeValueTypeLabel(encoder, Integer.MAX_VALUE); + break; + case VECTOR: + writeIndexVector(indexValue.getMapValue(), encoder); + break; + case REGEX: + writeIndexRegex(indexValue.getMapValue(), encoder); + break; + case BSON_TIMESTAMP: + writeIndexBsonTimestamp(indexValue.getMapValue(), encoder); + break; + case BSON_OBJECT_ID: + writeIndexBsonObjectId(indexValue.getMapValue(), encoder); + break; + case BSON_BINARY: + writeIndexBsonBinaryData(indexValue.getMapValue(), encoder); + break; + case INT32: + writeIndexInt32(indexValue.getMapValue(), encoder); + break; + case 
DECIMAL128: + // Double and Decimal128 sort the same + // Decimal128 is written as double with precision lost + double number = + Double.parseDouble( + indexValue + .getMapValue() + .getFieldsMap() + .get(Values.RESERVED_DECIMAL128_KEY) + .getStringValue()); + writeIndexDouble(number, encoder); + break; + case MIN_KEY: + writeValueTypeLabel(encoder, INDEX_TYPE_MIN_KEY); + break; + case MAX_KEY: + writeValueTypeLabel(encoder, INDEX_TYPE_MAX_KEY); + break; + default: + writeIndexMap(indexValue.getMapValue(), encoder); + writeTruncationMarker(encoder); } - - writeIndexMap(indexValue.getMapValue(), encoder); - writeTruncationMarker(encoder); break; case ARRAY_VALUE: writeIndexArray(indexValue.getArrayValue(), encoder); @@ -182,6 +174,37 @@ private void writeUnlabeledIndexString( encoder.writeString(stringIndexValue); } + private void writeIndexDouble(double number, DirectionalIndexByteEncoder encoder) { + if (Double.isNaN(number)) { + writeValueTypeLabel(encoder, INDEX_TYPE_NAN); + return; + } + writeValueTypeLabel(encoder, INDEX_TYPE_NUMBER); + if (number == -0.0) { + encoder.writeDouble(0.0); // -0.0, 0 and 0.0 are all considered the same + } else { + encoder.writeDouble(number); + } + } + + private void writeIndexInt32(MapValue mapValue, DirectionalIndexByteEncoder encoder) { + writeValueTypeLabel(encoder, INDEX_TYPE_NUMBER); + // Double and Int32 sort the same + encoder.writeDouble(mapValue.getFieldsMap().get(Values.RESERVED_INT32_KEY).getIntegerValue()); + } + + private void writeIndexTimestamp(Timestamp timestamp, DirectionalIndexByteEncoder encoder) { + writeValueTypeLabel(encoder, INDEX_TYPE_TIMESTAMP); + encoder.writeLong(timestamp.getSeconds()); + encoder.writeLong(timestamp.getNanos()); + } + + private void writeIndexGeoPoint(LatLng geoPoint, DirectionalIndexByteEncoder encoder) { + writeValueTypeLabel(encoder, INDEX_TYPE_GEOPOINT); + encoder.writeDouble(geoPoint.getLatitude()); + encoder.writeDouble(geoPoint.getLongitude()); + } + private void writeIndexVector(MapValue mapIndexValue, DirectionalIndexByteEncoder encoder) { Map map = mapIndexValue.getFieldsMap(); String key = Values.VECTOR_MAP_VECTORS_KEY; diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/model/Values.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/model/Values.java index 71456829ffb..2f39c96a826 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/model/Values.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/model/Values.java @@ -19,6 +19,7 @@ import static com.google.firebase.firestore.util.Assert.hardAssert; import androidx.annotation.Nullable; +import com.google.firebase.firestore.Quadruple; import com.google.firebase.firestore.util.Util; import com.google.firestore.v1.ArrayValue; import com.google.firestore.v1.ArrayValueOrBuilder; @@ -56,6 +57,9 @@ public class Values { // For Int32 type public static final String RESERVED_INT32_KEY = "__int__"; + // For Decimal128 type. 
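+  // Encoded as a single-field map, e.g. (proto-JSON form, as produced by
+  // UserDataReader.parseDecimal128Value and detected by isDecimal128Value below):
+  //   { mapValue: { fields: { __decimal128__: { stringValue: "1.2e3" } } } }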
+ public static final String RESERVED_DECIMAL128_KEY = "__decimal128__"; + // For RequestTimestamp public static final String RESERVED_BSON_TIMESTAMP_KEY = "__request_timestamp__"; @@ -142,28 +146,30 @@ public static int typeOrder(Value value) { return TYPE_ORDER_ARRAY; case MAP_VALUE: MapRepresentation mapType = detectMapRepresentation(value); - if (mapType.equals(MapRepresentation.SERVER_TIMESTAMP)) { - return TYPE_ORDER_SERVER_TIMESTAMP; - } else if (mapType.equals(MapRepresentation.INTERNAL_MAX)) { - return TYPE_ORDER_MAX_VALUE; - } else if (mapType.equals(MapRepresentation.VECTOR)) { - return TYPE_ORDER_VECTOR; - } else if (mapType.equals(MapRepresentation.MIN_KEY)) { - return TYPE_ORDER_MIN_KEY; - } else if (mapType.equals(MapRepresentation.MAX_KEY)) { - return TYPE_ORDER_MAX_KEY; - } else if (mapType.equals(MapRepresentation.REGEX)) { - return TYPE_ORDER_REGEX; - } else if (mapType.equals(MapRepresentation.BSON_TIMESTAMP)) { - return TYPE_ORDER_BSON_TIMESTAMP; - } else if (mapType.equals(MapRepresentation.BSON_OBJECT_ID)) { - return TYPE_ORDER_BSON_OBJECT_ID; - } else if (mapType.equals(MapRepresentation.BSON_BINARY)) { - return TYPE_ORDER_BSON_BINARY; - } else if (mapType.equals(MapRepresentation.INT32)) { - return TYPE_ORDER_NUMBER; - } else { - return TYPE_ORDER_MAP; + switch (mapType) { + case SERVER_TIMESTAMP: + return TYPE_ORDER_SERVER_TIMESTAMP; + case INTERNAL_MAX: + return TYPE_ORDER_MAX_VALUE; + case VECTOR: + return TYPE_ORDER_VECTOR; + case MIN_KEY: + return TYPE_ORDER_MIN_KEY; + case MAX_KEY: + return TYPE_ORDER_MAX_KEY; + case REGEX: + return TYPE_ORDER_REGEX; + case BSON_TIMESTAMP: + return TYPE_ORDER_BSON_TIMESTAMP; + case BSON_OBJECT_ID: + return TYPE_ORDER_BSON_OBJECT_ID; + case BSON_BINARY: + return TYPE_ORDER_BSON_BINARY; + case INT32: + case DECIMAL128: + return TYPE_ORDER_NUMBER; + default: + return TYPE_ORDER_MAP; } default: throw fail("Invalid value type: " + value.getValueTypeCase()); @@ -206,16 +212,16 @@ public static boolean equals(Value left, Value right) { } private static boolean numberEquals(Value left, Value right) { - if (left.getValueTypeCase() == Value.ValueTypeCase.INTEGER_VALUE - && right.getValueTypeCase() == Value.ValueTypeCase.INTEGER_VALUE) { + if ((isInt64Value(left) && isInt64Value(right)) + || (isInt32Value(left) && isInt32Value(right))) { return getIntegerValue(left) == getIntegerValue(right); - } else if (left.getValueTypeCase() == Value.ValueTypeCase.DOUBLE_VALUE - && right.getValueTypeCase() == Value.ValueTypeCase.DOUBLE_VALUE) { + } else if (isDouble(left) && isDouble(right)) { return Double.doubleToLongBits(left.getDoubleValue()) == Double.doubleToLongBits(right.getDoubleValue()); - } else if (detectMapRepresentation(left).equals(MapRepresentation.INT32) - && detectMapRepresentation(right).equals(MapRepresentation.INT32)) { - return getIntegerValue(left) == getIntegerValue(right); + } else if (isDecimal128Value(left) && isDecimal128Value(right)) { + Quadruple leftQuadruple = Quadruple.fromString(getDecimal128StringValue(left)); + Quadruple rightQuadruple = Quadruple.fromString(getDecimal128StringValue(right)); + return Util.compareQuadruples(leftQuadruple, rightQuadruple) == 0; } return false; @@ -229,12 +235,20 @@ private static long getIntegerValue(Value value) { if (value.hasIntegerValue()) { return value.getIntegerValue(); } - if (value.hasMapValue() && value.getMapValue().getFieldsMap().containsKey(RESERVED_INT32_KEY)) { + if (isInt32Value(value)) { return 
value.getMapValue().getFieldsMap().get(RESERVED_INT32_KEY).getIntegerValue(); } throw new IllegalArgumentException("getIntegerValue was called with a non-integer argument"); } + private static String getDecimal128StringValue(Value value) { + if (isDecimal128Value(value)) { + return value.getMapValue().getFieldsMap().get(RESERVED_DECIMAL128_KEY).getStringValue(); + } + throw new IllegalArgumentException( + "getDecimal128Value was called with a non-decimal128 argument"); + } + private static boolean arrayEquals(Value left, Value right) { ArrayValue leftArray = left.getArrayValue(); ArrayValue rightArray = right.getArrayValue(); @@ -362,24 +376,31 @@ public static int upperBoundCompare( } private static int compareNumbers(Value left, Value right) { - if (left.getValueTypeCase() == Value.ValueTypeCase.DOUBLE_VALUE) { + // If either argument is Decimal128, we cast both to wider (128-bit) representation, and compare + // Quadruple values. + if (isDecimal128Value(left) || isDecimal128Value(right)) { + Quadruple leftQuadruple = convertNumberToQuadruple(left); + Quadruple rightQuadruple = convertNumberToQuadruple(right); + return Util.compareQuadruples(leftQuadruple, rightQuadruple); + } + + if (isDouble(left)) { double leftDouble = left.getDoubleValue(); - if (right.getValueTypeCase() == Value.ValueTypeCase.DOUBLE_VALUE) { + if (isDouble(right)) { // left and right are both doubles. return Util.compareDoubles(leftDouble, right.getDoubleValue()); - } else if (right.getValueTypeCase() == Value.ValueTypeCase.INTEGER_VALUE - || detectMapRepresentation(right) == MapRepresentation.INT32) { - // left is a double and right is a 32/64-bit integer. + } else if (isIntegerValue(right)) { + // left is a double and right is a 32/64-bit integer value. return Util.compareMixed(leftDouble, getIntegerValue(right)); } - } else if (left.getValueTypeCase() == Value.ValueTypeCase.INTEGER_VALUE - || detectMapRepresentation(left) == MapRepresentation.INT32) { + } + + if (isIntegerValue(left)) { long leftLong = getIntegerValue(left); - if (right.getValueTypeCase() == Value.ValueTypeCase.INTEGER_VALUE - || detectMapRepresentation(right) == MapRepresentation.INT32) { - // left and right both a 32/64-bit integer. + if (isIntegerValue(right)) { + // left and right both a 32/64-bit integer value. return Util.compareLongs(leftLong, getIntegerValue(right)); - } else if (right.getValueTypeCase() == Value.ValueTypeCase.DOUBLE_VALUE) { + } else if (isDouble(right)) { // left is a 32/64-bit integer and right is a double . return -1 * Util.compareMixed(right.getDoubleValue(), leftLong); } @@ -388,6 +409,30 @@ private static int compareNumbers(Value left, Value right) { throw fail("Unexpected values: %s vs %s", left, right); } + /** + * Converts the given number value to a Quadruple. Throws an exception if the value is not a + * number. + */ + private static Quadruple convertNumberToQuadruple(Value value) { + // Doubles + if (isDouble(value)) { + return Quadruple.fromDouble(value.getDoubleValue()); + } + + // 64-bit or 32-bit integers. 
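+    // (A long fits losslessly in the 128-bit Quadruple mantissa, so the mixed comparison with a
+    // Decimal128 value does not go through a lossy conversion to double.)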
+ if (isInt64Value(value) || isInt32Value(value)) { + return Quadruple.fromLong(getIntegerValue(value)); + } + + // Decimal128 numbers + if (isDecimal128Value(value)) { + return Quadruple.fromString(getDecimal128StringValue(value)); + } + + throw new IllegalArgumentException( + "convertNumberToQuadruple was called on a non-numeric value."); + } + private static int compareTimestamps(Timestamp left, Timestamp right) { int cmp = Util.compareLongs(left.getSeconds(), right.getSeconds()); if (cmp != 0) { @@ -609,7 +654,7 @@ private static void canonifyArray(StringBuilder builder, ArrayValue arrayValue) } /** Returns true if `value` is a INTEGER_VALUE. */ - public static boolean isInteger(@Nullable Value value) { + public static boolean isInt64Value(@Nullable Value value) { return value != null && value.getValueTypeCase() == Value.ValueTypeCase.INTEGER_VALUE; } @@ -620,7 +665,12 @@ public static boolean isDouble(@Nullable Value value) { /** Returns true if `value` is either a INTEGER_VALUE or a DOUBLE_VALUE. */ public static boolean isNumber(@Nullable Value value) { - return isInteger(value) || isDouble(value); + return isInt64Value(value) || isDouble(value); + } + + /** Returns true if `value` is a INTEGER_VALUE or a Int32 Value. */ + public static boolean isIntegerValue(@Nullable Value value) { + return isInt64Value(value) || isInt32Value(value); } /** Returns true if `value` is an ARRAY_VALUE. */ @@ -637,7 +687,20 @@ public static boolean isNullValue(@Nullable Value value) { } public static boolean isNanValue(@Nullable Value value) { - return value != null && Double.isNaN(value.getDoubleValue()); + if (value != null && Double.isNaN(value.getDoubleValue())) { + return true; + } + + if (isDecimal128Value(value)) { + return value + .getMapValue() + .getFieldsMap() + .get(RESERVED_DECIMAL128_KEY) + .getStringValue() + .equals("NaN"); + } + + return false; } public static boolean isMapValue(@Nullable Value value) { @@ -772,26 +835,28 @@ public static Value getLowerBound(Value value) { case MAP_VALUE: MapRepresentation mapType = detectMapRepresentation(value); // VectorValue sorts after ArrayValue and before an empty MapValue - if (mapType.equals(MapRepresentation.VECTOR)) { - return MIN_VECTOR_VALUE; - } else if (mapType.equals(MapRepresentation.BSON_OBJECT_ID)) { - return MIN_BSON_OBJECT_ID_VALUE; - } else if (mapType.equals(MapRepresentation.BSON_TIMESTAMP)) { - return MIN_BSON_TIMESTAMP_VALUE; - } else if (mapType.equals(MapRepresentation.BSON_BINARY)) { - return MIN_BSON_BINARY_VALUE; - } else if (mapType.equals(MapRepresentation.REGEX)) { - return MIN_REGEX_VALUE; - } else if (mapType.equals(MapRepresentation.INT32)) { - // int32Value is treated the same as integerValue and doubleValue - return MIN_NUMBER; - } else if (mapType.equals(MapRepresentation.MIN_KEY)) { - return MIN_KEY_VALUE; - } else if (mapType.equals(MapRepresentation.MAX_KEY)) { - return MAX_KEY_VALUE; + switch (mapType) { + case VECTOR: + return MIN_VECTOR_VALUE; + case BSON_OBJECT_ID: + return MIN_BSON_OBJECT_ID_VALUE; + case BSON_TIMESTAMP: + return MIN_BSON_TIMESTAMP_VALUE; + case BSON_BINARY: + return MIN_BSON_BINARY_VALUE; + case REGEX: + return MIN_REGEX_VALUE; + case INT32: + case DECIMAL128: + // Int32Value and Decimal128Value are treated the same as integerValue and doubleValue + return MIN_NUMBER; + case MIN_KEY: + return MIN_KEY_VALUE; + case MAX_KEY: + return MAX_KEY_VALUE; + default: + return MIN_MAP; } - - return MIN_MAP; default: throw new IllegalArgumentException("Unknown value type: " + 
value.getValueTypeCase()); } @@ -821,102 +886,110 @@ public static Value getUpperBound(Value value) { return MIN_VECTOR_VALUE; case MAP_VALUE: MapRepresentation mapType = detectMapRepresentation(value); - if (mapType.equals(MapRepresentation.VECTOR)) { - return MIN_MAP; - } - if (mapType.equals(MapRepresentation.BSON_OBJECT_ID)) { - return MIN_GEO_POINT; - } - if (mapType.equals(MapRepresentation.BSON_TIMESTAMP)) { - return MIN_STRING; + switch (mapType) { + case VECTOR: + return MIN_MAP; + case BSON_OBJECT_ID: + return MIN_GEO_POINT; + case BSON_TIMESTAMP: + return MIN_STRING; + case BSON_BINARY: + return MIN_REFERENCE; + case REGEX: + return MIN_ARRAY; + case INT32: + case DECIMAL128: + // Int32Value and decimal128Value are treated the same as integerValue and doubleValue + return MIN_TIMESTAMP; + case MIN_KEY: + return MIN_BOOLEAN; + case MAX_KEY: + return INTERNAL_MAX_VALUE; + default: + return MAX_KEY_VALUE; } - if (mapType.equals(MapRepresentation.BSON_BINARY)) { - return MIN_REFERENCE; - } - if (mapType.equals(MapRepresentation.REGEX)) { - return MIN_ARRAY; - } - if (mapType.equals(MapRepresentation.INT32)) { - // int32Value is treated the same as integerValue and doubleValue - return MIN_TIMESTAMP; - } - if (mapType.equals(MapRepresentation.MIN_KEY)) { - return MIN_BOOLEAN; - } - if (mapType.equals(MapRepresentation.MAX_KEY)) { - return INTERNAL_MAX_VALUE; - } - - return MAX_KEY_VALUE; default: throw new IllegalArgumentException("Unknown value type: " + value.getValueTypeCase()); } } - static boolean isMinKey(Map fields) { + private static boolean isMapWithSingleFieldOfType( + Value value, String key, Value.ValueTypeCase typeCase) { + if (value == null + || value.getMapValue() == null + || value.getMapValue().getFieldsMap() == null) { + return false; + } + + Map fields = value.getMapValue().getFieldsMap(); return fields.size() == 1 - && fields.containsKey(RESERVED_MIN_KEY) - && fields.get(RESERVED_MIN_KEY).hasNullValue(); + && fields.containsKey(key) + && fields.get(key).getValueTypeCase() == typeCase; } - static boolean isMaxKey(Map fields) { - return fields.size() == 1 - && fields.containsKey(RESERVED_MAX_KEY) - && fields.get(RESERVED_MAX_KEY).hasNullValue(); + static boolean isMinKey(Value value) { + return isMapWithSingleFieldOfType(value, RESERVED_MIN_KEY, Value.ValueTypeCase.NULL_VALUE); } - static boolean isInt32Value(Map fields) { - return fields.size() == 1 - && fields.containsKey(RESERVED_INT32_KEY) - && fields.get(RESERVED_INT32_KEY).hasIntegerValue(); + static boolean isMaxKey(Value value) { + return isMapWithSingleFieldOfType(value, RESERVED_MAX_KEY, Value.ValueTypeCase.NULL_VALUE); } - static boolean isBsonObjectId(Map fields) { - return fields.size() == 1 - && fields.containsKey(RESERVED_OBJECT_ID_KEY) - && fields.get(RESERVED_OBJECT_ID_KEY).hasStringValue(); + public static boolean isInt32Value(Value value) { + return isMapWithSingleFieldOfType(value, RESERVED_INT32_KEY, Value.ValueTypeCase.INTEGER_VALUE); } - static boolean isBsonBinaryData(Map fields) { - return fields.size() == 1 - && fields.containsKey(RESERVED_BSON_BINARY_KEY) - && fields.get(RESERVED_BSON_BINARY_KEY).hasBytesValue(); - } - - static boolean isRegexValue(Map fields) { - if (fields.size() == 1 - && fields.containsKey(RESERVED_REGEX_KEY) - && fields.get(RESERVED_REGEX_KEY).hasMapValue()) { - MapValue innerMapValue = fields.get(RESERVED_REGEX_KEY).getMapValue(); - Map values = innerMapValue.getFieldsMap(); - return innerMapValue.getFieldsCount() == 2 - && 
values.containsKey(RESERVED_REGEX_PATTERN_KEY) - && values.containsKey(RESERVED_REGEX_OPTIONS_KEY) - && values.get(RESERVED_REGEX_PATTERN_KEY).hasStringValue() - && values.get(RESERVED_REGEX_OPTIONS_KEY).hasStringValue(); + public static boolean isDecimal128Value(Value value) { + return isMapWithSingleFieldOfType( + value, RESERVED_DECIMAL128_KEY, Value.ValueTypeCase.STRING_VALUE); + } + + static boolean isBsonObjectId(Value value) { + return isMapWithSingleFieldOfType( + value, RESERVED_OBJECT_ID_KEY, Value.ValueTypeCase.STRING_VALUE); + } + + static boolean isBsonBinaryData(Value value) { + return isMapWithSingleFieldOfType( + value, RESERVED_BSON_BINARY_KEY, Value.ValueTypeCase.BYTES_VALUE); + } + + static boolean isRegexValue(Value value) { + if (!isMapWithSingleFieldOfType(value, RESERVED_REGEX_KEY, Value.ValueTypeCase.MAP_VALUE)) { + return false; } - return false; + + MapValue innerMapValue = + value.getMapValue().getFieldsMap().get(RESERVED_REGEX_KEY).getMapValue(); + Map values = innerMapValue.getFieldsMap(); + return innerMapValue.getFieldsCount() == 2 + && values.containsKey(RESERVED_REGEX_PATTERN_KEY) + && values.containsKey(RESERVED_REGEX_OPTIONS_KEY) + && values.get(RESERVED_REGEX_PATTERN_KEY).hasStringValue() + && values.get(RESERVED_REGEX_OPTIONS_KEY).hasStringValue(); } - static boolean isBsonTimestamp(Map fields) { - if (fields.size() == 1 - && fields.containsKey(RESERVED_BSON_TIMESTAMP_KEY) - && fields.get(RESERVED_BSON_TIMESTAMP_KEY).hasMapValue()) { - MapValue innerMapValue = fields.get(RESERVED_BSON_TIMESTAMP_KEY).getMapValue(); - Map values = innerMapValue.getFieldsMap(); - return innerMapValue.getFieldsCount() == 2 - && values.containsKey(RESERVED_BSON_TIMESTAMP_SECONDS_KEY) - && values.containsKey(RESERVED_BSON_TIMESTAMP_INCREMENT_KEY) - && values.get(RESERVED_BSON_TIMESTAMP_SECONDS_KEY).hasIntegerValue() - && values.get(RESERVED_BSON_TIMESTAMP_INCREMENT_KEY).hasIntegerValue(); + static boolean isBsonTimestamp(Value value) { + if (!isMapWithSingleFieldOfType( + value, RESERVED_BSON_TIMESTAMP_KEY, Value.ValueTypeCase.MAP_VALUE)) { + return false; } - return false; + + MapValue innerMapValue = + value.getMapValue().getFieldsMap().get(RESERVED_BSON_TIMESTAMP_KEY).getMapValue(); + Map values = innerMapValue.getFieldsMap(); + return innerMapValue.getFieldsCount() == 2 + && values.containsKey(RESERVED_BSON_TIMESTAMP_SECONDS_KEY) + && values.containsKey(RESERVED_BSON_TIMESTAMP_INCREMENT_KEY) + && values.get(RESERVED_BSON_TIMESTAMP_SECONDS_KEY).hasIntegerValue() + && values.get(RESERVED_BSON_TIMESTAMP_INCREMENT_KEY).hasIntegerValue(); } public enum MapRepresentation { REGEX, BSON_OBJECT_ID, INT32, + DECIMAL128, BSON_TIMESTAMP, BSON_BINARY, MIN_KEY, @@ -934,52 +1007,48 @@ public static MapRepresentation detectMapRepresentation(Value value) { return MapRepresentation.REGULAR_MAP; } - Map fields = value.getMapValue().getFieldsMap(); - - // Check for type-based mappings - if (fields.containsKey(TYPE_KEY)) { - String typeString = fields.get(TYPE_KEY).getStringValue(); - - if (typeString.equals(RESERVED_VECTOR_KEY)) { - return MapRepresentation.VECTOR; - } - if (typeString.equals(RESERVED_MAX_KEY)) { - return MapRepresentation.INTERNAL_MAX; - } - if (typeString.equals(RESERVED_SERVER_TIMESTAMP_KEY)) { - return MapRepresentation.SERVER_TIMESTAMP; - } - } - - if (fields.size() != 1) { - // All BSON types have 1 key in the map. To improve performance, we can - // return early if the number of keys in the map is not 1. 
- return MapRepresentation.REGULAR_MAP; - } - // Check for BSON-related mappings - if (isRegexValue(fields)) { + if (isRegexValue(value)) { return MapRepresentation.REGEX; } - if (isBsonObjectId(fields)) { + if (isBsonObjectId(value)) { return MapRepresentation.BSON_OBJECT_ID; } - if (isInt32Value(fields)) { + if (isInt32Value(value)) { return MapRepresentation.INT32; } - if (isBsonTimestamp(fields)) { + if (isDecimal128Value(value)) { + return MapRepresentation.DECIMAL128; + } + if (isBsonTimestamp(value)) { return MapRepresentation.BSON_TIMESTAMP; } - if (isBsonBinaryData(fields)) { + if (isBsonBinaryData(value)) { return MapRepresentation.BSON_BINARY; } - if (isMinKey(fields)) { + if (isMinKey(value)) { return MapRepresentation.MIN_KEY; } - if (isMaxKey(fields)) { + if (isMaxKey(value)) { return MapRepresentation.MAX_KEY; } + Map fields = value.getMapValue().getFieldsMap(); + + // Check for type-based mappings + if (fields.containsKey(TYPE_KEY)) { + String typeString = fields.get(TYPE_KEY).getStringValue(); + if (typeString.equals(RESERVED_VECTOR_KEY)) { + return MapRepresentation.VECTOR; + } + if (typeString.equals(RESERVED_MAX_KEY)) { + return MapRepresentation.INTERNAL_MAX; + } + if (typeString.equals(RESERVED_SERVER_TIMESTAMP_KEY)) { + return MapRepresentation.SERVER_TIMESTAMP; + } + } + return MapRepresentation.REGULAR_MAP; } } diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/model/mutation/NumericIncrementTransformOperation.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/model/mutation/NumericIncrementTransformOperation.java index 0dae39ae03d..d690c71c32a 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/model/mutation/NumericIncrementTransformOperation.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/model/mutation/NumericIncrementTransformOperation.java @@ -15,7 +15,7 @@ package com.google.firebase.firestore.model.mutation; import static com.google.firebase.firestore.model.Values.isDouble; -import static com.google.firebase.firestore.model.Values.isInteger; +import static com.google.firebase.firestore.model.Values.isInt64Value; import static com.google.firebase.firestore.util.Assert.fail; import static com.google.firebase.firestore.util.Assert.hardAssert; @@ -44,10 +44,10 @@ public Value applyToLocalView(@Nullable Value previousValue, Timestamp localWrit Value baseValue = computeBaseValue(previousValue); // Return an integer value only if the previous value and the operand is an integer. 
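+    // e.g. incrementing a stored 2L by the integer operand 1 yields the long 3L, while incrementing
+    // a stored 2.0 by 1 yields the double 3.0.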
- if (isInteger(baseValue) && isInteger(operand)) { + if (isInt64Value(baseValue) && isInt64Value(operand)) { long sum = safeIncrement(baseValue.getIntegerValue(), operandAsLong()); return Value.newBuilder().setIntegerValue(sum).build(); - } else if (isInteger(baseValue)) { + } else if (isInt64Value(baseValue)) { double sum = baseValue.getIntegerValue() + operandAsDouble(); return Value.newBuilder().setDoubleValue(sum).build(); } else { @@ -102,7 +102,7 @@ private long safeIncrement(long x, long y) { private double operandAsDouble() { if (isDouble(operand)) { return operand.getDoubleValue(); - } else if (isInteger(operand)) { + } else if (isInt64Value(operand)) { return operand.getIntegerValue(); } else { throw fail( @@ -114,7 +114,7 @@ private double operandAsDouble() { private long operandAsLong() { if (isDouble(operand)) { return (long) operand.getDoubleValue(); - } else if (isInteger(operand)) { + } else if (isInt64Value(operand)) { return operand.getIntegerValue(); } else { throw fail( diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/util/CustomClassMapper.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/util/CustomClassMapper.java index 074e6ef6a25..385e7e3bd2c 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/util/CustomClassMapper.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/util/CustomClassMapper.java @@ -23,6 +23,7 @@ import com.google.firebase.firestore.BsonBinaryData; import com.google.firebase.firestore.BsonObjectId; import com.google.firebase.firestore.BsonTimestamp; +import com.google.firebase.firestore.Decimal128Value; import com.google.firebase.firestore.DocumentId; import com.google.firebase.firestore.DocumentReference; import com.google.firebase.firestore.Exclude; @@ -187,6 +188,7 @@ private static Object serialize(T o, ErrorPath path) { || o instanceof MaxKey || o instanceof RegexValue || o instanceof Int32Value + || o instanceof Decimal128Value || o instanceof BsonTimestamp || o instanceof BsonObjectId || o instanceof BsonBinaryData) { @@ -261,6 +263,8 @@ private static T deserializeToClass(Object o, Class clazz, DeserializeCon return (T) convertVectorValue(o, context); } else if (Int32Value.class.isAssignableFrom(clazz)) { return (T) convertInt32Value(o, context); + } else if (Decimal128Value.class.isAssignableFrom(clazz)) { + return (T) convertDecimal128Value(o, context); } else if (BsonTimestamp.class.isAssignableFrom(clazz)) { return (T) convertBsonTimestamp(o, context); } else if (BsonObjectId.class.isAssignableFrom(clazz)) { @@ -580,6 +584,16 @@ private static Int32Value convertInt32Value(Object o, DeserializeContext context } } + private static Decimal128Value convertDecimal128Value(Object o, DeserializeContext context) { + if (o instanceof Decimal128Value) { + return (Decimal128Value) o; + } else { + throw deserializeError( + context.errorPath, + "Failed to convert value of type " + o.getClass().getName() + " to Decimal128Value"); + } + } + private static BsonTimestamp convertBsonTimestamp(Object o, DeserializeContext context) { if (o instanceof BsonTimestamp) { return (BsonTimestamp) o; diff --git a/firebase-firestore/src/main/java/com/google/firebase/firestore/util/Util.java b/firebase-firestore/src/main/java/com/google/firebase/firestore/util/Util.java index 2cc39337002..bc9c631e6e6 100644 --- a/firebase-firestore/src/main/java/com/google/firebase/firestore/util/Util.java +++ b/firebase-firestore/src/main/java/com/google/firebase/firestore/util/Util.java @@ 
-23,6 +23,7 @@ import com.google.firebase.firestore.FieldPath; import com.google.firebase.firestore.FirebaseFirestoreException; import com.google.firebase.firestore.FirebaseFirestoreException.Code; +import com.google.firebase.firestore.Quadruple; import com.google.protobuf.ByteString; import io.grpc.Status; import io.grpc.StatusException; @@ -145,6 +146,11 @@ public static int compareMixed(double doubleValue, long longValue) { return NumberComparisonHelper.firestoreCompareDoubleWithLong(doubleValue, longValue); } + /** Utility function to compare Quadruples (using Firestore semantics for NaN).*/ + public static int compareQuadruples(Quadruple left, Quadruple right) { + return NumberComparisonHelper.firestoreCompareQuadruples(left, right); + } + public static > Comparator comparator() { return Comparable::compareTo; } diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/BsonTypesTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/BsonTypesTest.java index 34ae59306db..f7cb4191d9f 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/BsonTypesTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/BsonTypesTest.java @@ -38,8 +38,6 @@ public void testBsonObjectIdEquality() { assertNotEquals(bsonObjectIdDup, differentObjectId); assertEquals(bsonObjectId.hashCode(), bsonObjectIdDup.hashCode()); - assertNotEquals(bsonObjectId.hashCode(), differentObjectId.hashCode()); - assertNotEquals(bsonObjectIdDup.hashCode(), differentObjectId.hashCode()); } @Test @@ -56,10 +54,6 @@ public void testBsonTimeStampEquality() { assertNotEquals(bsonTimestampDup, differentIncrementTimestamp); assertEquals(bsonTimestamp.hashCode(), bsonTimestampDup.hashCode()); - assertNotEquals(bsonTimestamp.hashCode(), differentSecondsTimestamp.hashCode()); - assertNotEquals(bsonTimestamp.hashCode(), differentIncrementTimestamp.hashCode()); - assertNotEquals(bsonTimestampDup.hashCode(), differentSecondsTimestamp.hashCode()); - assertNotEquals(bsonTimestampDup.hashCode(), differentIncrementTimestamp.hashCode()); } @Test @@ -76,10 +70,6 @@ public void testBsonBinaryDataEquality() { assertNotEquals(bsonBinaryDataDup, differentDataBinaryData); assertEquals(bsonBinaryData.hashCode(), bsonBinaryDataDup.hashCode()); - assertNotEquals(bsonBinaryData.hashCode(), differentSubtypeBinaryData.hashCode()); - assertNotEquals(bsonBinaryData.hashCode(), differentDataBinaryData.hashCode()); - assertNotEquals(bsonBinaryDataDup.hashCode(), differentSubtypeBinaryData.hashCode()); - assertNotEquals(bsonBinaryDataDup.hashCode(), differentDataBinaryData.hashCode()); } @Test @@ -96,10 +86,6 @@ public void testRegexEquality() { assertNotEquals(regexDup, differentOptionsRegex); assertEquals(regex.hashCode(), regexDup.hashCode()); - assertNotEquals(regex.hashCode(), differentPatternRegex.hashCode()); - assertNotEquals(regex.hashCode(), differentOptionsRegex.hashCode()); - assertNotEquals(regexDup.hashCode(), differentPatternRegex.hashCode()); - assertNotEquals(regexDup.hashCode(), differentOptionsRegex.hashCode()); } @Test @@ -113,8 +99,112 @@ public void testInt32Equality() { assertNotEquals(int32Dup, differentInt32); assertEquals(int32.hashCode(), int32Dup.hashCode()); - assertNotEquals(int32.hashCode(), differentInt32.hashCode()); - assertNotEquals(int32Dup.hashCode(), differentInt32.hashCode()); + } + + @Test + public void testDecimal128Equality() { + Decimal128Value decimal128 = new Decimal128Value("1.2e3"); + Decimal128Value decimal128Dup = new 
Decimal128Value("1.2e3"); + Decimal128Value differentDecimal128 = new Decimal128Value("1.3e3"); + assertEquals(decimal128, decimal128Dup); + assertNotEquals(decimal128, differentDecimal128); + assertEquals(decimal128.hashCode(), decimal128Dup.hashCode()); + + Decimal128Value dZeroPointFive = new Decimal128Value("0.5"); + Decimal128Value dHalf = new Decimal128Value(".5"); + Decimal128Value dFiveEminusOne = new Decimal128Value("5e-1"); + assertEquals(dZeroPointFive, dHalf); + assertEquals(dZeroPointFive.hashCode(), dHalf.hashCode()); + assertEquals(dZeroPointFive, dFiveEminusOne); + assertEquals(dZeroPointFive.hashCode(), dFiveEminusOne.hashCode()); + + Decimal128Value dOne = new Decimal128Value("1"); + Decimal128Value dOnePointZero = new Decimal128Value("1.0"); + Decimal128Value dOnePointZeroZero = new Decimal128Value("1.00"); + assertEquals(dOne, dOnePointZero); + assertEquals(dOne.hashCode(), dOnePointZero.hashCode()); + assertEquals(dOnePointZero, dOnePointZeroZero); + assertEquals(dOnePointZero.hashCode(), dOnePointZeroZero.hashCode()); + + // numerical equality with different string representation + Decimal128Value dTwelveHundred_1_2e3 = new Decimal128Value("1.2e3"); + Decimal128Value dTwelveHundred_12e2 = new Decimal128Value("12e2"); + Decimal128Value dTwelveHundred_0_12e4 = new Decimal128Value("0.12e4"); + Decimal128Value dTwelveHundred_12000eMinus1 = new Decimal128Value("12000e-1"); + Decimal128Value dOnePointTwo = new Decimal128Value("1.2"); + assertEquals(dTwelveHundred_1_2e3, dTwelveHundred_12e2); + assertEquals(dTwelveHundred_1_2e3.hashCode(), dTwelveHundred_12e2.hashCode()); + assertEquals(dTwelveHundred_1_2e3, dTwelveHundred_0_12e4); + assertEquals(dTwelveHundred_1_2e3.hashCode(), dTwelveHundred_0_12e4.hashCode()); + assertEquals(dTwelveHundred_1_2e3, dTwelveHundred_12000eMinus1); + assertEquals(dTwelveHundred_1_2e3.hashCode(), dTwelveHundred_12000eMinus1.hashCode()); + assertNotEquals(dTwelveHundred_1_2e3, dOnePointTwo); + + // Edge Cases: Zero + Decimal128Value positiveZero = new Decimal128Value("0"); + Decimal128Value negativeZero = new Decimal128Value("-0"); + Decimal128Value anotherPositiveZero = new Decimal128Value("+0"); + Decimal128Value zeroWithDecimal = new Decimal128Value("0.0"); + Decimal128Value negativeZeroWithDecimal = new Decimal128Value("-0.0"); + Decimal128Value positiveZeroWithDecimal = new Decimal128Value("+0.0"); + Decimal128Value zeroWithLeadingAndTrailingZeros = new Decimal128Value("00.00"); + Decimal128Value negativeZeroWithLeadingAndTrailingZeros = new Decimal128Value("-00.000"); + Decimal128Value negativeZeroWithExponent = new Decimal128Value("-00.000e-10"); + Decimal128Value negativeZeroWithZeroExponent = new Decimal128Value("-00.000e-0"); + Decimal128Value zeroWithExponent = new Decimal128Value("00.000e10"); + assertEquals(positiveZero, negativeZero); + assertEquals(positiveZero.hashCode(), negativeZero.hashCode()); + assertEquals(positiveZero, anotherPositiveZero); + assertEquals(positiveZero.hashCode(), anotherPositiveZero.hashCode()); + assertEquals(positiveZero, zeroWithDecimal); + assertEquals(positiveZero.hashCode(), zeroWithDecimal.hashCode()); + assertEquals(positiveZero, negativeZeroWithDecimal); + assertEquals(positiveZero.hashCode(), negativeZeroWithDecimal.hashCode()); + assertEquals(positiveZero, positiveZeroWithDecimal); + assertEquals(positiveZero.hashCode(), positiveZeroWithDecimal.hashCode()); + assertEquals(positiveZero, zeroWithLeadingAndTrailingZeros); + assertEquals(positiveZero.hashCode(), 
zeroWithLeadingAndTrailingZeros.hashCode()); + assertEquals(positiveZero, negativeZeroWithLeadingAndTrailingZeros); + assertEquals(positiveZero.hashCode(), negativeZeroWithLeadingAndTrailingZeros.hashCode()); + assertEquals(positiveZero, negativeZeroWithExponent); + assertEquals(positiveZero.hashCode(), negativeZeroWithExponent.hashCode()); + assertEquals(positiveZero, negativeZeroWithZeroExponent); + assertEquals(positiveZero.hashCode(), negativeZeroWithZeroExponent.hashCode()); + assertEquals(positiveZero, zeroWithExponent); + assertEquals(positiveZero.hashCode(), zeroWithExponent.hashCode()); + + // Infinity + Decimal128Value positiveInfinity = new Decimal128Value("Infinity"); + Decimal128Value negInfinity = new Decimal128Value("-Infinity"); + Decimal128Value anotherPositiveInfinity = new Decimal128Value("Infinity"); + assertEquals(positiveInfinity, anotherPositiveInfinity); + assertEquals(positiveInfinity.hashCode(), anotherPositiveInfinity.hashCode()); + assertNotEquals(positiveInfinity, negInfinity); + + // NaN + Decimal128Value nan1 = new Decimal128Value("NaN"); + Decimal128Value nan2 = new Decimal128Value("NaN"); + assertEquals(nan1, nan2); + assertEquals(nan1.hashCode(), nan2.hashCode()); + + assertNotEquals(nan1, dOne); + assertNotEquals(nan1, positiveInfinity); + + // Large Numbers + Decimal128Value largeNum1 = + new Decimal128Value("123456789012345678901234567890.123456789012345678901234567890"); + Decimal128Value largeNum2 = + new Decimal128Value("1.23456789012345678901234567890123456789012345678901234567890e29"); + assertEquals(largeNum1, largeNum2); + assertEquals(largeNum1.hashCode(), largeNum2.hashCode()); + + // Small Numbers + Decimal128Value smallNum1 = + new Decimal128Value( + "0.0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001"); + Decimal128Value smallNum2 = new Decimal128Value("1.0e-100"); + assertEquals(smallNum1, smallNum2); + assertEquals(smallNum1.hashCode(), smallNum2.hashCode()); } @Test @@ -140,7 +230,6 @@ public void testMinKeyMaxKeyNullNotEqual() { assertNotEquals(minKey, maxKey); assertNotEquals(minKey, null); assertNotEquals(maxKey, null); - assertNotEquals(minKey.hashCode(), maxKey.hashCode()); } @Test diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/FieldValueTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/FieldValueTest.java index a8837e06b1c..32fefe7de0f 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/FieldValueTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/FieldValueTest.java @@ -36,6 +36,8 @@ public void testEquals() { RegexValue regexDup = new RegexValue("pattern", "options"); Int32Value int32 = new Int32Value(1); Int32Value int32Dup = new Int32Value(1); + Decimal128Value decimal128 = new Decimal128Value("1.2e3"); + Decimal128Value decimal128Dup = new Decimal128Value("1.2e3"); BsonTimestamp bsonTimestamp = new BsonTimestamp(1, 2); BsonTimestamp bsonTimestampDup = new BsonTimestamp(1, 2); BsonObjectId bsonObjectId = new BsonObjectId("507f191e810c19729de860ea"); @@ -58,6 +60,7 @@ public void testEquals() { // BSON types assertEquals(regex, regexDup); assertEquals(int32, int32Dup); + assertEquals(decimal128, decimal128Dup); assertEquals(bsonTimestamp, bsonTimestampDup); assertEquals(bsonObjectId, bsonObjectIdDup); assertEquals(bsonBinary, bsonBinaryDup); @@ -67,18 +70,26 @@ public void testEquals() { // BSON types are not equal to each other assertNotEquals(regex, int32); + 
assertNotEquals(regex, decimal128); assertNotEquals(regex, bsonTimestamp); assertNotEquals(regex, bsonObjectId); assertNotEquals(regex, bsonBinary); assertNotEquals(regex, minKey); assertNotEquals(regex, maxKey); + assertNotEquals(int32, decimal128); assertNotEquals(int32, bsonTimestamp); assertNotEquals(int32, bsonObjectId); assertNotEquals(int32, bsonBinary); assertNotEquals(int32, minKey); assertNotEquals(int32, maxKey); + assertNotEquals(decimal128, bsonTimestamp); + assertNotEquals(decimal128, bsonObjectId); + assertNotEquals(decimal128, bsonBinary); + assertNotEquals(decimal128, minKey); + assertNotEquals(decimal128, maxKey); + assertNotEquals(bsonTimestamp, bsonObjectId); assertNotEquals(bsonTimestamp, bsonBinary); assertNotEquals(bsonTimestamp, minKey); @@ -93,6 +104,7 @@ public void testEquals() { // BSON types hash codes assertEquals(regex.hashCode(), regexDup.hashCode()); assertEquals(int32.hashCode(), int32Dup.hashCode()); + assertEquals(decimal128.hashCode(), decimal128Dup.hashCode()); assertEquals(bsonTimestamp.hashCode(), bsonTimestampDup.hashCode()); assertEquals(bsonObjectId.hashCode(), bsonObjectIdDup.hashCode()); assertEquals(bsonBinary.hashCode(), bsonBinaryDup.hashCode()); @@ -101,18 +113,26 @@ public void testEquals() { // BSON types hash codes are not equal to each other assertNotEquals(regex.hashCode(), int32.hashCode()); + assertNotEquals(regex.hashCode(), decimal128.hashCode()); assertNotEquals(regex.hashCode(), bsonTimestamp.hashCode()); assertNotEquals(regex.hashCode(), bsonObjectId.hashCode()); assertNotEquals(regex.hashCode(), bsonBinary.hashCode()); assertNotEquals(regex.hashCode(), minKey.hashCode()); assertNotEquals(regex.hashCode(), maxKey.hashCode()); + assertNotEquals(int32.hashCode(), decimal128.hashCode()); assertNotEquals(int32.hashCode(), bsonTimestamp.hashCode()); assertNotEquals(int32.hashCode(), bsonObjectId.hashCode()); assertNotEquals(int32.hashCode(), bsonBinary.hashCode()); assertNotEquals(int32.hashCode(), minKey.hashCode()); assertNotEquals(int32.hashCode(), maxKey.hashCode()); + assertNotEquals(decimal128.hashCode(), bsonTimestamp.hashCode()); + assertNotEquals(decimal128.hashCode(), bsonObjectId.hashCode()); + assertNotEquals(decimal128.hashCode(), bsonBinary.hashCode()); + assertNotEquals(decimal128.hashCode(), minKey.hashCode()); + assertNotEquals(decimal128.hashCode(), maxKey.hashCode()); + assertNotEquals(bsonTimestamp.hashCode(), bsonObjectId.hashCode()); assertNotEquals(bsonTimestamp.hashCode(), bsonBinary.hashCode()); assertNotEquals(bsonTimestamp.hashCode(), minKey.hashCode()); diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/UserDataWriterTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/UserDataWriterTest.java index e6a12ee3e95..942ec99ee28 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/UserDataWriterTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/UserDataWriterTest.java @@ -276,6 +276,26 @@ public void testConvertsInt32Value() { } } + @Test + public void testConvertsDecimal128Value() { + List testCases = + asList( + new Decimal128Value("-1.2e3"), + new Decimal128Value("1.2e3"), + new Decimal128Value("1.3e3"), + new Decimal128Value("NaN"), + new Decimal128Value("-Infinity"), + new Decimal128Value("Infinity"), + new Decimal128Value("4.2e+3"), + new Decimal128Value("-4.2e-3"), + new Decimal128Value("-0")); + for (Decimal128Value p : testCases) { + Value value = wrap(p); + Object convertedValue = convertValue(value); + 
assertEquals(p, convertedValue); + } + } + @Test public void testConvertsMinKey() { List testCases = asList(MinKey.instance(), MinKey.instance()); diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/bundle/BundleSerializerTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/bundle/BundleSerializerTest.java index 6e0186adac8..3983a08c0e2 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/bundle/BundleSerializerTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/bundle/BundleSerializerTest.java @@ -295,6 +295,18 @@ public void testDecodesInt32Values() throws JSONException { assertDecodesValue(json, proto.build()); } + @Test + public void testDecodesDecimal128Values() throws JSONException { + String json = "{ mapValue: { fields: { __decimal128__: { stringValue: '1.2e3' } } } }"; + Value.Builder proto = Value.newBuilder(); + proto.setMapValue( + MapValue.newBuilder() + .putFields( + Values.RESERVED_DECIMAL128_KEY, + Value.newBuilder().setStringValue("1.2e3").build())); + assertDecodesValue(json, proto.build()); + } + @Test public void testDecodesMinKey() throws JSONException { String json = "{ mapValue: { fields: { __min__: { nullValue: null } } } }"; diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/core/QueryTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/core/QueryTest.java index 0f8e6c22389..65390aa8054 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/core/QueryTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/core/QueryTest.java @@ -37,6 +37,7 @@ import com.google.firebase.firestore.BsonBinaryData; import com.google.firebase.firestore.BsonObjectId; import com.google.firebase.firestore.BsonTimestamp; +import com.google.firebase.firestore.Decimal128Value; import com.google.firebase.firestore.GeoPoint; import com.google.firebase.firestore.Int32Value; import com.google.firebase.firestore.MaxKey; @@ -863,6 +864,10 @@ public void testCanonicalIdsAreStable() { assertCanonicalId( baseQuery.filter(filter("a", "<=", new Int32Value(1))), "collection|f:a<={__int__:1}|ob:aasc__name__asc"); + assertCanonicalId( + baseQuery.filter(filter("a", "<=", new Decimal128Value("1.2e3"))), + "collection|f:a<={__decimal128__:1.2e3}|ob:aasc__name__asc"); + assertCanonicalId( baseQuery.filter(filter("a", "<=", MinKey.instance())), "collection|f:a<={__min__:null}|ob:aasc__name__asc"); diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/index/FirestoreIndexValueWriterTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/index/FirestoreIndexValueWriterTest.java index 6f1eea8cbbb..318e9137ee1 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/index/FirestoreIndexValueWriterTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/index/FirestoreIndexValueWriterTest.java @@ -340,6 +340,114 @@ public void writeIndexValueSupportsSmallestInt32() Assert.assertArrayEquals(actualBytes, expectedBytes); } + @Test + public void writeIndexValueSupportsDecimal128() throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(new Decimal128Value("1.2e3")); + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes 
= encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_NUMBER); // Number type + expectedDirectionalEncoder.writeDouble(Double.parseDouble("1.2e3")); // Number value + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsNegativeDecimal128() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(new Decimal128Value("-1.2e3")); + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_NUMBER); // Number type + expectedDirectionalEncoder.writeDouble(Double.parseDouble("-1.2e3")); // Number value + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsSpecialDecimal128() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(new Decimal128Value("NaN")); + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_NAN); // Number type, special case NaN + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + + @Test + public void writeIndexValueSupportsLargestDecimal128() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(new Decimal128Value("Infinity")); + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_NUMBER); // Number type + expectedDirectionalEncoder.writeDouble(Double.parseDouble("Infinity")); // Number value + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, 
expectedBytes); + } + + @Test + public void writeIndexValueSupportsSmallestDecimal128() + throws ExecutionException, InterruptedException { + UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); + Value value = dataReader.parseQueryValue(new Decimal128Value("-Infinity")); + IndexByteEncoder encoder = new IndexByteEncoder(); + FirestoreIndexValueWriter.INSTANCE.writeIndexValue( + value, encoder.forKind(FieldIndex.Segment.Kind.ASCENDING)); + byte[] actualBytes = encoder.getEncodedBytes(); + + IndexByteEncoder expectedEncoder = new IndexByteEncoder(); + DirectionalIndexByteEncoder expectedDirectionalEncoder = + expectedEncoder.forKind(FieldIndex.Segment.Kind.ASCENDING); + expectedDirectionalEncoder.writeLong( + FirestoreIndexValueWriter.INDEX_TYPE_NUMBER); // Number type + expectedDirectionalEncoder.writeDouble(Double.parseDouble("-Infinity")); // Number value + expectedDirectionalEncoder.writeInfinity(); + byte[] expectedBytes = expectedEncoder.getEncodedBytes(); + + Assert.assertArrayEquals(actualBytes, expectedBytes); + } + @Test public void writeIndexValueSupportsMinKey() throws ExecutionException, InterruptedException { UserDataReader dataReader = new UserDataReader(DatabaseId.EMPTY); diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteIndexManagerTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteIndexManagerTest.java index fce0404c342..b0b4c7e9eb7 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteIndexManagerTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteIndexManagerTest.java @@ -42,6 +42,8 @@ import com.google.firebase.firestore.BsonBinaryData; import com.google.firebase.firestore.BsonObjectId; import com.google.firebase.firestore.BsonTimestamp; +import com.google.firebase.firestore.Decimal128Value; +import com.google.firebase.firestore.FieldValue; import com.google.firebase.firestore.GeoPoint; import com.google.firebase.firestore.Int32Value; import com.google.firebase.firestore.MaxKey; @@ -1441,6 +1443,92 @@ public void testIndexesInt32() { verifyResults(query); } + @Test + public void testIndexesDecimal128Value() { + indexManager.addFieldIndex( + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); + + addDoc("coll/doc1", map("key", new Decimal128Value("-1.2e3"))); + addDoc("coll/doc2", map("key", new Decimal128Value("0.0"))); + addDoc("coll/doc3", map("key", new Decimal128Value("1.2e3"))); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + verifyResults(query, "coll/doc1", "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "==", new Decimal128Value("-1200"))); + verifyResults(query, "coll/doc1"); + + query = query("coll").filter(filter("key", "!=", new Decimal128Value("0"))); + verifyResults(query, "coll/doc1", "coll/doc3"); + + query = query("coll").filter(filter("key", ">=", new Decimal128Value("-0"))); + verifyResults(query, "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "<=", new Decimal128Value("-0.0"))); + verifyResults(query, "coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", ">", new Decimal128Value("1.2e-3"))); + verifyResults(query, "coll/doc3"); + + query = query("coll").filter(filter("key", "<", new Decimal128Value("-1.2e-3"))); + verifyResults(query, "coll/doc1"); + + query = query("coll").filter(filter("key", ">", new Decimal128Value("1.2e3"))); + verifyResults(query); + + query 
= query("coll").filter(filter("key", "<", new Decimal128Value("-1.2e3"))); + verifyResults(query); + } + + @Test + public void testIndexesDecimal128ValueWithPrecisionLoss() { + indexManager.addFieldIndex( + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING)); + + addDoc( + "coll/doc1", + map( + "key", + new Decimal128Value( + "-0.1234567890123456789"))); // will be rounded to -0.12345678901234568 + addDoc("coll/doc2", map("key", new Decimal128Value("0"))); + addDoc( + "coll/doc3", + map( + "key", + new Decimal128Value( + "0.1234567890123456789"))); // will be rounded to 0.12345678901234568 + + Query query = query("coll").orderBy(orderBy("key", "asc")); + verifyResults(query, "coll/doc1", "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "==", new Decimal128Value("0.1234567890123456789"))); + verifyResults(query, "coll/doc3"); + + // Mismatched behaviour caused by the rounding error: Firestore fetches doc3 from the SQLite DB + // because doc3 rounds to the same number, even though the actual number stored in doc3 is + // different. Unlike SQLiteLocalStoreTest, this test returns doc3 as a result. + query = query("coll").filter(filter("key", "==", new Decimal128Value("0.12345678901234568"))); + verifyResults(query, "coll/doc3"); + + // Operations that don't require the full 17 decimal digits of precision are not affected by + // this rounding error. + query = query("coll").filter(filter("key", "!=", new Decimal128Value("0.0"))); + verifyResults(query, "coll/doc1", "coll/doc3"); + + query = query("coll").filter(filter("key", ">=", new Decimal128Value("1.23e-1"))); + verifyResults(query, "coll/doc3"); + + query = query("coll").filter(filter("key", "<=", new Decimal128Value("-1.23e-1"))); + verifyResults(query, "coll/doc1"); + + query = query("coll").filter(filter("key", ">", new Decimal128Value("1.2e3"))); + verifyResults(query); + + query = query("coll").filter(filter("key", "<", new Decimal128Value("-1.2e3"))); + verifyResults(query); + } + + @Test + public void testIndexesMinKey() { + indexManager.addFieldIndex( @@ -1511,31 +1599,35 @@ public void testIndexFieldsOfBsonTypesTogether() { addDoc("coll/doc1", map("key", MinKey.instance())); addDoc("coll/doc2", map("key", new Int32Value(2))); - addDoc("coll/doc3", map("key", new Int32Value(1))); - addDoc("coll/doc4", map("key", new BsonTimestamp(1, 2))); - addDoc("coll/doc5", map("key", new BsonTimestamp(1, 1))); - addDoc("coll/doc6", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4}))); - addDoc("coll/doc7", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); - addDoc("coll/doc8", map("key", new BsonObjectId("507f191e810c19729de860eb"))); - addDoc("coll/doc9", map("key", new BsonObjectId("507f191e810c19729de860ea"))); - addDoc("coll/doc10", map("key", new RegexValue("a", "m"))); - addDoc("coll/doc11", map("key", new RegexValue("a", "i"))); - addDoc("coll/doc12", map("key", MaxKey.instance())); + addDoc("coll/doc3", map("key", new Int32Value(-1))); + addDoc("coll/doc4", map("key", new Decimal128Value("1.2e3"))); + addDoc("coll/doc5", map("key", new Decimal128Value("-0.0"))); + addDoc("coll/doc6", map("key", new BsonTimestamp(1, 2))); + addDoc("coll/doc7", map("key", new BsonTimestamp(1, 1))); + addDoc("coll/doc8", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4}))); + addDoc("coll/doc9", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); + addDoc("coll/doc10", map("key", new BsonObjectId("507f191e810c19729de860eb"))); + addDoc("coll/doc11", map("key", new
BsonObjectId("507f191e810c19729de860ea"))); + addDoc("coll/doc12", map("key", new RegexValue("a", "m"))); + addDoc("coll/doc13", map("key", new RegexValue("a", "i"))); + addDoc("coll/doc14", map("key", MaxKey.instance())); Query query = query("coll").orderBy(orderBy("key", "desc")); verifyResults( query, - "coll/doc12", // maxKey - "coll/doc10", // regex m - "coll/doc11", // regex i - "coll/doc8", // objectId eb - "coll/doc9", // objectId ea - "coll/doc6", // binary [1,2,4] - "coll/doc7", // binary [1,2,3] - "coll/doc4", // timestamp 1,2 - "coll/doc5", // timestamp 1,1 + "coll/doc14", // maxKey + "coll/doc12", // regex m + "coll/doc13", // regex i + "coll/doc10", // objectId eb + "coll/doc11", // objectId ea + "coll/doc8", // binary [1,2,4] + "coll/doc9", // binary [1,2,3] + "coll/doc6", // timestamp 1,2 + "coll/doc7", // timestamp 1,1 + "coll/doc4", // decimal128 1200 "coll/doc2", // int32 2 - "coll/doc3", // int32 1 + "coll/doc5", // decimal128 -0.0 + "coll/doc3", // int32 -1 "coll/doc1" // minKey ); } @@ -1551,39 +1643,42 @@ public void testIndexFieldsOfAllTypesTogether() { addDoc("coll/e", map("key", new Int32Value(1))); addDoc("coll/f", map("key", 2.0)); addDoc("coll/g", map("key", 3L)); - addDoc("coll/h", map("key", new Timestamp(100, 123456000))); - addDoc("coll/i", map("key", new BsonTimestamp(1, 2))); - addDoc("coll/j", map("key", "string")); - addDoc("coll/k", map("key", blob(1, 2, 3))); - addDoc("coll/l", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); - addDoc("coll/m", map("key", ref("foo/bar"))); - addDoc("coll/n", map("key", new BsonObjectId("507f191e810c19729de860ea"))); - addDoc("coll/o", map("key", new GeoPoint(0, 1))); - addDoc("coll/p", map("key", new RegexValue("^foo", "i"))); - addDoc("coll/q", map("key", Arrays.asList(1, 2))); - // Note: Vector type not available in Java SDK, skipping 'r' - addDoc("coll/s", map("key", map("a", 1))); - addDoc("coll/t", map("key", MaxKey.instance())); + addDoc("coll/h", map("key", new Decimal128Value("1.2e3"))); + addDoc("coll/i", map("key", new Timestamp(100, 123456000))); + addDoc("coll/j", map("key", new BsonTimestamp(1, 2))); + addDoc("coll/k", map("key", "string")); + addDoc("coll/l", map("key", blob(1, 2, 3))); + addDoc("coll/m", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3}))); + addDoc("coll/n", map("key", ref("foo/bar"))); + addDoc("coll/o", map("key", new BsonObjectId("507f191e810c19729de860ea"))); + addDoc("coll/p", map("key", new GeoPoint(0, 1))); + addDoc("coll/q", map("key", new RegexValue("^foo", "i"))); + addDoc("coll/r", map("key", Arrays.asList(1, 2))); + addDoc("coll/s", map("key", FieldValue.vector(new double[] {1, 2, 3}))); + addDoc("coll/t", map("key", map("a", 1))); + addDoc("coll/u", map("key", MaxKey.instance())); Query query = query("coll").orderBy(orderBy("key", "desc")); verifyResults( query, - "coll/t", // maxKey - "coll/s", // map - "coll/q", // array - "coll/p", // regex - "coll/o", // geopoint - "coll/n", // objectId - "coll/m", // reference - "coll/l", // bsonBinary - "coll/k", // bytes - "coll/j", // string - "coll/i", // bsonTimestamp - "coll/h", // timestamp - "coll/g", // long - "coll/f", // double - "coll/e", // int32 - "coll/d", // NaN + "coll/u", // maxKey + "coll/t", // map + "coll/s", // vector + "coll/r", // array + "coll/q", // regex + "coll/p", // geopoint + "coll/o", // objectId + "coll/n", // reference + "coll/m", // bsonBinary + "coll/l", // bytes + "coll/k", // string + "coll/j", // bsonTimestamp + "coll/i", // timestamp + "coll/h", // Number decimal128 + 
"coll/g", // Number long + "coll/f", // Number double + "coll/e", // Number int32 + "coll/d", // Number NaN "coll/c", // boolean "coll/b", // minKey "coll/a" // null diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteLocalStoreTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteLocalStoreTest.java index 1edbef0474c..bc8fcc31f4b 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteLocalStoreTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/local/SQLiteLocalStoreTest.java @@ -39,6 +39,7 @@ import com.google.firebase.firestore.BsonBinaryData; import com.google.firebase.firestore.BsonObjectId; import com.google.firebase.firestore.BsonTimestamp; +import com.google.firebase.firestore.Decimal128Value; import com.google.firebase.firestore.FieldValue; import com.google.firebase.firestore.GeoPoint; import com.google.firebase.firestore.Int32Value; @@ -848,6 +849,189 @@ public void testIndexesInt32() { assertQueryReturned("coll/doc1", "coll/doc2"); } + @Test + public void testIndexesDecimal128Value() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); + configureFieldIndexes(singletonList(index)); + writeMutation(setMutation("coll/doc1", map("key", new Decimal128Value("-1.2e3")))); + writeMutation(setMutation("coll/doc2", map("key", new Decimal128Value("0")))); + writeMutation(setMutation("coll/doc3", map("key", new Decimal128Value("1.2e3")))); + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "==", new Decimal128Value("-1200"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + assertOverlayTypes(keyMap("coll/doc1", CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1"); + + query = query("coll").filter(filter("key", "!=", new Decimal128Value("0.0"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc3"); + + query = query("coll").filter(filter("key", ">=", new Decimal128Value("-0"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc2", "coll/doc3"); + + // This will fail if the negative 0s are not converted to positive 0 in `writeIndexValue` + // function + query = query("coll").filter(filter("key", "<=", new Decimal128Value("-0.0"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2"); + + query = query("coll").filter(filter("key", ">", new Decimal128Value("1.2e3"))); + 
executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = query("coll").filter(filter("key", "<", new Decimal128Value("-1.2e3"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = + query("coll") + .filter( + filter( + "key", + "in", + Arrays.asList(new Decimal128Value("-1.2e3"), new Decimal128Value("0")))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2"); + } + + @Test + public void testIndexesDecimal128ValueWithPrecisionLoss() { + FieldIndex index = + fieldIndex("coll", 0, FieldIndex.INITIAL_STATE, "key", FieldIndex.Segment.Kind.ASCENDING); + configureFieldIndexes(singletonList(index)); + writeMutation( + setMutation( + "coll/doc1", + map( + "key", + new Decimal128Value( + "-0.1234567890123456789")))); // will be rounded to -0.12345678901234568 + writeMutation(setMutation("coll/doc2", map("key", new Decimal128Value("0")))); + writeMutation( + setMutation( + "coll/doc3", + map( + "key", + new Decimal128Value( + "0.1234567890123456789")))); // will be rounded to 0.12345678901234568 + + backfillIndexes(); + + Query query = query("coll").orderBy(orderBy("key", "asc")); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 3, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc2", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc2", "coll/doc3"); + + query = query("coll").filter(filter("key", "==", new Decimal128Value("0.1234567890123456789"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + assertOverlayTypes(keyMap("coll/doc3", CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc3"); + + // Mismatched behaviour caused by the rounding error: Firestore fetches doc3 from the SQLite DB + // because doc3 rounds to the same number, but doc3 is not present in the final query result. + query = query("coll").filter(filter("key", "==", new Decimal128Value("0.12345678901234568"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + assertOverlayTypes(keyMap("coll/doc3", CountingQueryEngine.OverlayType.Set)); + assertQueryReturned(); + + // Operations that don't require the full 17 decimal digits of precision are not affected by + // this rounding error.
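+ // For example, Double.parseDouble("0.1234567890123456789") and + // Double.parseDouble("0.12345678901234568") produce the same 64-bit double, which is why both + // "==" filters above read the same index entry (illustrative note, assuming the index encodes + // Decimal128 values as doubles, as exercised in FirestoreIndexValueWriterTest).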
+ query = query("coll").filter(filter("key", "!=", new Decimal128Value("0.0"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 2, /* byCollection= */ 0); + assertOverlayTypes( + keyMap( + "coll/doc1", + CountingQueryEngine.OverlayType.Set, + "coll/doc3", + CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1", "coll/doc3"); + + query = query("coll").filter(filter("key", ">=", new Decimal128Value("1.23e-1"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + assertOverlayTypes(keyMap("coll/doc3", CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc3"); + + query = query("coll").filter(filter("key", "<=", new Decimal128Value("-1.23e-1"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 1, /* byCollection= */ 0); + assertOverlayTypes(keyMap("coll/doc1", CountingQueryEngine.OverlayType.Set)); + assertQueryReturned("coll/doc1"); + + query = query("coll").filter(filter("key", ">", new Decimal128Value("1.2e3"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + + query = query("coll").filter(filter("key", "<", new Decimal128Value("-1.2e3"))); + executeQuery(query); + assertOverlaysRead(/* byKey= */ 0, /* byCollection= */ 0); + assertOverlayTypes(keyMap()); + assertQueryReturned(); + } + @Test public void testIndexesMinKey() { FieldIndex index = @@ -1047,26 +1231,28 @@ public void testIndexesAllBsonTypesTogether() { writeMutation(setMutation("coll/doc1", map("key", MinKey.instance()))); writeMutation(setMutation("coll/doc2", map("key", new Int32Value(2)))); - writeMutation(setMutation("coll/doc3", map("key", new Int32Value(1)))); - writeMutation(setMutation("coll/doc4", map("key", new BsonTimestamp(1000, 1001)))); - writeMutation(setMutation("coll/doc5", map("key", new BsonTimestamp(1000, 1000)))); + writeMutation(setMutation("coll/doc3", map("key", new Int32Value(-1)))); + writeMutation(setMutation("coll/doc4", map("key", new Decimal128Value("1.2e3")))); + writeMutation(setMutation("coll/doc5", map("key", new Decimal128Value("-0.0")))); + writeMutation(setMutation("coll/doc6", map("key", new BsonTimestamp(1000, 1001)))); + writeMutation(setMutation("coll/doc7", map("key", new BsonTimestamp(1000, 1000)))); writeMutation( - setMutation("coll/doc6", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4})))); + setMutation("coll/doc8", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4})))); writeMutation( - setMutation("coll/doc7", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})))); + setMutation("coll/doc9", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})))); writeMutation( - setMutation("coll/doc8", map("key", new BsonObjectId("507f191e810c19729de860eb")))); + setMutation("coll/doc10", map("key", new BsonObjectId("507f191e810c19729de860eb")))); writeMutation( - setMutation("coll/doc9", map("key", new BsonObjectId("507f191e810c19729de860ea")))); - writeMutation(setMutation("coll/doc10", map("key", new RegexValue("^bar", "m")))); - writeMutation(setMutation("coll/doc11", map("key", new RegexValue("^bar", "i")))); - writeMutation(setMutation("coll/doc12", map("key", MaxKey.instance()))); + setMutation("coll/doc11", map("key", new BsonObjectId("507f191e810c19729de860ea")))); + writeMutation(setMutation("coll/doc12", map("key", new RegexValue("^bar", "m")))); + writeMutation(setMutation("coll/doc13", map("key", new RegexValue("^bar", "i")))); + 
writeMutation(setMutation("coll/doc14", map("key", MaxKey.instance()))); backfillIndexes(); Query query = query("coll").orderBy(orderBy("key", "desc")); executeQuery(query); - assertOverlaysRead(/* byKey= */ 12, /* byCollection= */ 0); + assertOverlaysRead(/* byKey= */ 14, /* byCollection= */ 0); assertOverlayTypes( keyMap( "coll/doc1", @@ -1092,20 +1278,27 @@ public void testIndexesAllBsonTypesTogether() { "coll/doc11", CountingQueryEngine.OverlayType.Set, "coll/doc12", + CountingQueryEngine.OverlayType.Set, + "coll/doc13", + CountingQueryEngine.OverlayType.Set, + "coll/doc14", CountingQueryEngine.OverlayType.Set)); assertQueryReturned( - "coll/doc12", - "coll/doc10", - "coll/doc11", - "coll/doc8", - "coll/doc9", - "coll/doc6", - "coll/doc7", - "coll/doc4", - "coll/doc5", - "coll/doc2", - "coll/doc3", - "coll/doc1"); + "coll/doc14", // maxKey + "coll/doc12", // regex m + "coll/doc13", // regex i + "coll/doc10", // objectId eb + "coll/doc11", // objectId ea + "coll/doc8", // binary [1,2,4] + "coll/doc9", // binary [1,2,3] + "coll/doc6", // timestamp 1,2 + "coll/doc7", // timestamp 1,1 + "coll/doc4", // decimal128 1200 + "coll/doc2", // int32 2 + "coll/doc5", // decimal128 -0.0 + "coll/doc3", // int32 -1 + "coll/doc1" // minKey + ); } @Test @@ -1121,27 +1314,28 @@ public void testIndexesAllTypesTogether() { writeMutation(setMutation("coll/doc5", map("key", new Int32Value(1)))); writeMutation(setMutation("coll/doc6", map("key", 2.0))); writeMutation(setMutation("coll/doc7", map("key", 3))); - writeMutation(setMutation("coll/doc8", map("key", new Timestamp(100, 123456000)))); - writeMutation(setMutation("coll/doc9", map("key", new BsonTimestamp(1, 2)))); - writeMutation(setMutation("coll/doc10", map("key", "string"))); - writeMutation(setMutation("coll/doc11", map("key", blob(1, 2, 3)))); + writeMutation(setMutation("coll/doc8", map("key", new Decimal128Value("1.2e3")))); + writeMutation(setMutation("coll/doc9", map("key", new Timestamp(100, 123456000)))); + writeMutation(setMutation("coll/doc10", map("key", new BsonTimestamp(1, 2)))); + writeMutation(setMutation("coll/doc11", map("key", "string"))); + writeMutation(setMutation("coll/doc12", map("key", blob(1, 2, 3)))); writeMutation( - setMutation("coll/doc12", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})))); - writeMutation(setMutation("coll/doc13", map("key", ref("foo/bar")))); + setMutation("coll/doc13", map("key", BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})))); + writeMutation(setMutation("coll/doc14", map("key", ref("foo/bar")))); writeMutation( - setMutation("coll/doc14", map("key", new BsonObjectId("507f191e810c19729de860ea")))); - writeMutation(setMutation("coll/doc15", map("key", new GeoPoint(1, 2)))); - writeMutation(setMutation("coll/doc16", map("key", new RegexValue("^bar", "m")))); - writeMutation(setMutation("coll/doc17", map("key", Arrays.asList(2, "foo")))); - writeMutation(setMutation("coll/doc18", map("key", FieldValue.vector(new double[] {1, 2, 3})))); - writeMutation(setMutation("coll/doc19", map("key", map("bar", 1, "foo", 2)))); - writeMutation(setMutation("coll/doc20", map("key", MaxKey.instance()))); + setMutation("coll/doc15", map("key", new BsonObjectId("507f191e810c19729de860ea")))); + writeMutation(setMutation("coll/doc16", map("key", new GeoPoint(1, 2)))); + writeMutation(setMutation("coll/doc17", map("key", new RegexValue("^bar", "m")))); + writeMutation(setMutation("coll/doc18", map("key", Arrays.asList(2, "foo")))); + writeMutation(setMutation("coll/doc19", map("key", 
FieldValue.vector(new double[] {1, 2, 3})))); + writeMutation(setMutation("coll/doc20", map("key", map("bar", 1, "foo", 2)))); + writeMutation(setMutation("coll/doc21", map("key", MaxKey.instance()))); backfillIndexes(); Query query = query("coll").orderBy(orderBy("key", "asc")); executeQuery(query); - assertOverlaysRead(/* byKey= */ 20, /* byCollection= */ 0); + assertOverlaysRead(/* byKey= */ 21, /* byCollection= */ 0); assertOverlayTypes( keyMap( "coll/doc1", @@ -1183,6 +1377,8 @@ public void testIndexesAllTypesTogether() { "coll/doc19", CountingQueryEngine.OverlayType.Set, "coll/doc20", + CountingQueryEngine.OverlayType.Set, + "coll/doc21", CountingQueryEngine.OverlayType.Set)); assertQueryReturned( "coll/doc1", @@ -1204,7 +1400,8 @@ public void testIndexesAllTypesTogether() { "coll/doc17", "coll/doc18", "coll/doc19", - "coll/doc20"); + "coll/doc20", + "coll/doc21"); } @Test diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/model/ValuesTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/model/ValuesTest.java index 66b0ff937d7..5530c457847 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/model/ValuesTest.java +++ b/firebase-firestore/src/test/java/com/google/firebase/firestore/model/ValuesTest.java @@ -31,6 +31,7 @@ import com.google.firebase.firestore.BsonBinaryData; import com.google.firebase.firestore.BsonObjectId; import com.google.firebase.firestore.BsonTimestamp; +import com.google.firebase.firestore.Decimal128Value; import com.google.firebase.firestore.FieldValue; import com.google.firebase.firestore.GeoPoint; import com.google.firebase.firestore.Int32Value; @@ -90,6 +91,10 @@ public void testValueEquality() { Int32Value int32Value1 = new Int32Value(1); Int32Value int32Value2 = new Int32Value(2); + Decimal128Value decimal128Value1 = new Decimal128Value("-1.2e3"); + Decimal128Value decimal128Value2 = new Decimal128Value("0.0"); + Decimal128Value decimal128Value3 = new Decimal128Value("1.2e-3"); + RegexValue regexValue1 = new RegexValue("^foo", "i"); RegexValue regexValue2 = new RegexValue("^foo", "m"); RegexValue regexValue3 = new RegexValue("^bar", "i"); @@ -152,6 +157,9 @@ public void testValueEquality() { .addEqualityGroup(wrap(new BsonTimestamp(2, 2)), wrap(bsonTimestamp3)) .addEqualityGroup(wrap(new Int32Value(1)), wrap(int32Value1)) .addEqualityGroup(wrap(new Int32Value(2)), wrap(int32Value2)) + .addEqualityGroup(wrap(new Decimal128Value("-1.2e3")), wrap(decimal128Value1)) + .addEqualityGroup(wrap(new Decimal128Value("0.0")), wrap(decimal128Value2)) + .addEqualityGroup(wrap(new Decimal128Value("1.2e-3")), wrap(decimal128Value3)) .addEqualityGroup(wrap(new RegexValue("^foo", "i")), wrap(regexValue1)) .addEqualityGroup(wrap(new RegexValue("^foo", "m")), wrap(regexValue2)) .addEqualityGroup(wrap(new RegexValue("^bar", "i")), wrap(regexValue3)) @@ -177,27 +185,50 @@ public void testValueOrdering() { .addEqualityGroup(wrap(true)) // 64-bit and 32-bit numbers order together numerically. 
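+ // 128-bit decimals are added to the groups below: a Decimal128 value is expected to order + // together with doubles, longs, and 32-bit ints that denote the same number.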
- .addEqualityGroup(wrap(Double.NaN)) - .addEqualityGroup(wrap(Double.NEGATIVE_INFINITY)) + .addEqualityGroup(wrap(Double.NaN), wrap(new Decimal128Value("NaN"))) + .addEqualityGroup(wrap(Double.NEGATIVE_INFINITY), wrap(new Decimal128Value("-Infinity"))) .addEqualityGroup(wrap(-Double.MAX_VALUE)) - .addEqualityGroup(wrap(Long.MIN_VALUE)) - .addEqualityGroup(wrap(new Int32Value(-2147483648)), wrap(Integer.MIN_VALUE)) - .addEqualityGroup(wrap(-1.1)) - .addEqualityGroup(wrap(-1.0)) + .addEqualityGroup(wrap(Long.MIN_VALUE), wrap(new Decimal128Value("-9223372036854775808"))) + .addEqualityGroup( + wrap(new Int32Value(-2147483648)), + wrap(Integer.MIN_VALUE), + wrap(new Decimal128Value("-2147483648"))) + // Note: a Decimal128 value compares unequal to the other number types when it has no exact + // binary (double) representation, e.g. 1.1. This is expected. + .addEqualityGroup(wrap(-1.5), wrap(new Decimal128Value("-1.5"))) + .addEqualityGroup(wrap(-1.0), wrap(new Decimal128Value("-1.0"))) .addEqualityGroup(wrap(-Double.MIN_NORMAL)) .addEqualityGroup(wrap(-Double.MIN_VALUE)) // Zeros all compare the same. - .addEqualityGroup(wrap(-0.0), wrap(0.0), wrap(0L), wrap(new Int32Value(0))) + .addEqualityGroup( + wrap(-0.0), + wrap(0.0), + wrap(0L), + wrap(new Int32Value(0)), + wrap(new Decimal128Value("0")), + wrap(new Decimal128Value("0.0")), + wrap(new Decimal128Value("-0")), + wrap(new Decimal128Value("-0.0")), + wrap(new Decimal128Value("+0")), + wrap(new Decimal128Value("+0.0"))) .addEqualityGroup(wrap(Double.MIN_VALUE)) .addEqualityGroup(wrap(Double.MIN_NORMAL)) - .addEqualityGroup(wrap(0.1)) + .addEqualityGroup(wrap(0.5), wrap(new Decimal128Value("0.5"))) // Doubles, Longs, Int32Values compareTo() the same. - .addEqualityGroup(wrap(1.0), wrap(1L), wrap(new Int32Value(1))) + .addEqualityGroup( + wrap(1.0), + wrap(1L), + wrap(new Int32Value(1)), + wrap(new Decimal128Value("1")), + wrap(new Decimal128Value("1.0"))) .addEqualityGroup(wrap(1.1)) - .addEqualityGroup(wrap(new Int32Value(2147483647)), wrap(Integer.MAX_VALUE)) + .addEqualityGroup( + wrap(new Int32Value(2147483647)), + wrap(Integer.MAX_VALUE), + wrap(new Decimal128Value("2.147483647e9"))) .addEqualityGroup(wrap(Long.MAX_VALUE)) .addEqualityGroup(wrap(Double.MAX_VALUE)) - .addEqualityGroup(wrap(Double.POSITIVE_INFINITY)) + .addEqualityGroup(wrap(Double.POSITIVE_INFINITY), wrap(new Decimal128Value("Infinity"))) // dates .addEqualityGroup(wrap(date1)) @@ -320,10 +351,12 @@ public void testLowerBound() { .addEqualityGroup(wrap(true)) // numbers + // Note: 32-bit and 64-bit integers and 128-bit decimals share the same lower bound .addEqualityGroup( wrap(getLowerBound(TestUtil.wrap(1.0))), wrap(Double.NaN), - wrap(getLowerBound(TestUtil.wrap(new Int32Value(1))))) + wrap(getLowerBound(TestUtil.wrap(new Int32Value(1)))), + wrap(getLowerBound(TestUtil.wrap(new Decimal128Value("1"))))) .addEqualityGroup(wrap(Double.NEGATIVE_INFINITY)) .addEqualityGroup(wrap(Long.MIN_VALUE)) @@ -413,13 +446,16 @@ public void testUpperBound() { // booleans .addEqualityGroup(wrap(true)) - .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(false)))) + .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(false))), wrap(Double.NaN)) // numbers .addEqualityGroup(wrap(new Int32Value(2147483647))) // largest int32 value .addEqualityGroup(wrap(Long.MAX_VALUE)) .addEqualityGroup(wrap(Double.POSITIVE_INFINITY)) - .addEqualityGroup(wrap(getUpperBound(TestUtil.wrap(1.0)))) + .addEqualityGroup( + wrap(getUpperBound(TestUtil.wrap(0))), + wrap(getUpperBound(TestUtil.wrap(new
Int32Value(0)))), + wrap(getUpperBound(TestUtil.wrap(new Decimal128Value("-0.0"))))) // dates .addEqualityGroup(wrap(date1)) @@ -505,6 +541,7 @@ public void testCanonicalIds() { assertCanonicalId( TestUtil.wrap(new BsonTimestamp(1, 2)), "{__request_timestamp__:{increment:2,seconds:1}}"); assertCanonicalId((TestUtil.wrap(new Int32Value(1))), "{__int__:1}"); + assertCanonicalId(TestUtil.wrap(new Decimal128Value("1.2e3")), "{__decimal128__:1.2e3}"); assertCanonicalId( TestUtil.wrap(BsonBinaryData.fromBytes(1, new byte[] {1, 2, 3})), "{__binary__:01010203}"); assertCanonicalId( @@ -529,78 +566,97 @@ public void DetectsBsonTypesCorrectly() { Value minKeyValue = TestUtil.wrap(MinKey.instance()); Value maxKeyValue = TestUtil.wrap(MaxKey.instance()); Value int32Value = TestUtil.wrap(new Int32Value(1)); + Value decimal128 = TestUtil.wrap(new Decimal128Value("1.2e3")); Value regexValue = TestUtil.wrap(new RegexValue("^foo", "i")); Value bsonTimestampValue = TestUtil.wrap(new BsonTimestamp(1, 2)); Value bsonObjectIdValue = TestUtil.wrap(new BsonObjectId("foo")); Value bsonBinaryDataValue1 = TestUtil.wrap(BsonBinaryData.fromBytes(1, new byte[] {})); Value bsonBinaryDataValue2 = TestUtil.wrap(BsonBinaryData.fromBytes(1, new byte[] {1, 2, 4})); - assertTrue(Values.isMinKey(minKeyValue.getMapValue().getFieldsMap())); - assertFalse(Values.isMinKey(maxKeyValue.getMapValue().getFieldsMap())); - assertFalse(Values.isMinKey(int32Value.getMapValue().getFieldsMap())); - assertFalse(Values.isMinKey(regexValue.getMapValue().getFieldsMap())); - assertFalse(Values.isMinKey(bsonTimestampValue.getMapValue().getFieldsMap())); - assertFalse(Values.isMinKey(bsonObjectIdValue.getMapValue().getFieldsMap())); - assertFalse(Values.isMinKey(bsonBinaryDataValue1.getMapValue().getFieldsMap())); - assertFalse(Values.isMinKey(bsonBinaryDataValue2.getMapValue().getFieldsMap())); - - assertFalse(Values.isMaxKey(minKeyValue.getMapValue().getFieldsMap())); - assertTrue(Values.isMaxKey(maxKeyValue.getMapValue().getFieldsMap())); - assertFalse(Values.isMaxKey(int32Value.getMapValue().getFieldsMap())); - assertFalse(Values.isMaxKey(regexValue.getMapValue().getFieldsMap())); - assertFalse(Values.isMaxKey(bsonTimestampValue.getMapValue().getFieldsMap())); - assertFalse(Values.isMaxKey(bsonObjectIdValue.getMapValue().getFieldsMap())); - assertFalse(Values.isMaxKey(bsonBinaryDataValue1.getMapValue().getFieldsMap())); - assertFalse(Values.isMaxKey(bsonBinaryDataValue2.getMapValue().getFieldsMap())); - - assertFalse(Values.isInt32Value(minKeyValue.getMapValue().getFieldsMap())); - assertFalse(Values.isInt32Value(maxKeyValue.getMapValue().getFieldsMap())); - assertTrue(Values.isInt32Value(int32Value.getMapValue().getFieldsMap())); - assertFalse(Values.isInt32Value(regexValue.getMapValue().getFieldsMap())); - assertFalse(Values.isInt32Value(bsonTimestampValue.getMapValue().getFieldsMap())); - assertFalse(Values.isInt32Value(bsonObjectIdValue.getMapValue().getFieldsMap())); - assertFalse(Values.isInt32Value(bsonBinaryDataValue1.getMapValue().getFieldsMap())); - assertFalse(Values.isInt32Value(bsonBinaryDataValue2.getMapValue().getFieldsMap())); - - assertFalse(Values.isRegexValue(minKeyValue.getMapValue().getFieldsMap())); - assertFalse(Values.isRegexValue(maxKeyValue.getMapValue().getFieldsMap())); - assertFalse(Values.isRegexValue(int32Value.getMapValue().getFieldsMap())); - assertTrue(Values.isRegexValue(regexValue.getMapValue().getFieldsMap())); - assertFalse(Values.isRegexValue(bsonTimestampValue.getMapValue().getFieldsMap())); - 
assertFalse(Values.isRegexValue(bsonObjectIdValue.getMapValue().getFieldsMap())); - assertFalse(Values.isRegexValue(bsonBinaryDataValue1.getMapValue().getFieldsMap())); - assertFalse(Values.isRegexValue(bsonBinaryDataValue2.getMapValue().getFieldsMap())); - - assertFalse(Values.isBsonTimestamp(minKeyValue.getMapValue().getFieldsMap())); - assertFalse(Values.isBsonTimestamp(maxKeyValue.getMapValue().getFieldsMap())); - assertFalse(Values.isBsonTimestamp(int32Value.getMapValue().getFieldsMap())); - assertFalse(Values.isBsonTimestamp(regexValue.getMapValue().getFieldsMap())); - assertTrue(Values.isBsonTimestamp(bsonTimestampValue.getMapValue().getFieldsMap())); - assertFalse(Values.isBsonTimestamp(bsonObjectIdValue.getMapValue().getFieldsMap())); - assertFalse(Values.isBsonTimestamp(bsonBinaryDataValue1.getMapValue().getFieldsMap())); - assertFalse(Values.isBsonTimestamp(bsonBinaryDataValue2.getMapValue().getFieldsMap())); - - assertFalse(Values.isBsonObjectId(minKeyValue.getMapValue().getFieldsMap())); - assertFalse(Values.isBsonObjectId(maxKeyValue.getMapValue().getFieldsMap())); - assertFalse(Values.isBsonObjectId(int32Value.getMapValue().getFieldsMap())); - assertFalse(Values.isBsonObjectId(regexValue.getMapValue().getFieldsMap())); - assertFalse(Values.isBsonObjectId(bsonTimestampValue.getMapValue().getFieldsMap())); - assertTrue(Values.isBsonObjectId(bsonObjectIdValue.getMapValue().getFieldsMap())); - assertFalse(Values.isBsonObjectId(bsonBinaryDataValue1.getMapValue().getFieldsMap())); - assertFalse(Values.isBsonObjectId(bsonBinaryDataValue2.getMapValue().getFieldsMap())); - - assertFalse(Values.isBsonBinaryData(minKeyValue.getMapValue().getFieldsMap())); - assertFalse(Values.isBsonBinaryData(maxKeyValue.getMapValue().getFieldsMap())); - assertFalse(Values.isBsonBinaryData(int32Value.getMapValue().getFieldsMap())); - assertFalse(Values.isBsonBinaryData(regexValue.getMapValue().getFieldsMap())); - assertFalse(Values.isBsonBinaryData(bsonTimestampValue.getMapValue().getFieldsMap())); - assertFalse(Values.isBsonBinaryData(bsonObjectIdValue.getMapValue().getFieldsMap())); - assertTrue(Values.isBsonBinaryData(bsonBinaryDataValue1.getMapValue().getFieldsMap())); - assertTrue(Values.isBsonBinaryData(bsonBinaryDataValue2.getMapValue().getFieldsMap())); + assertTrue(Values.isMinKey(minKeyValue)); + assertFalse(Values.isMinKey(maxKeyValue)); + assertFalse(Values.isMinKey(int32Value)); + assertFalse(Values.isMinKey(decimal128)); + assertFalse(Values.isMinKey(regexValue)); + assertFalse(Values.isMinKey(bsonTimestampValue)); + assertFalse(Values.isMinKey(bsonObjectIdValue)); + assertFalse(Values.isMinKey(bsonBinaryDataValue1)); + assertFalse(Values.isMinKey(bsonBinaryDataValue2)); + + assertFalse(Values.isMaxKey(minKeyValue)); + assertTrue(Values.isMaxKey(maxKeyValue)); + assertFalse(Values.isMaxKey(int32Value)); + assertFalse(Values.isMaxKey(decimal128)); + assertFalse(Values.isMaxKey(regexValue)); + assertFalse(Values.isMaxKey(bsonTimestampValue)); + assertFalse(Values.isMaxKey(bsonObjectIdValue)); + assertFalse(Values.isMaxKey(bsonBinaryDataValue1)); + assertFalse(Values.isMaxKey(bsonBinaryDataValue2)); + + assertFalse(Values.isInt32Value(minKeyValue)); + assertFalse(Values.isInt32Value(maxKeyValue)); + assertTrue(Values.isInt32Value(int32Value)); + assertFalse(Values.isInt32Value(decimal128)); + assertFalse(Values.isInt32Value(regexValue)); + assertFalse(Values.isInt32Value(bsonTimestampValue)); + assertFalse(Values.isInt32Value(bsonObjectIdValue)); + 
assertFalse(Values.isInt32Value(bsonBinaryDataValue1)); + assertFalse(Values.isInt32Value(bsonBinaryDataValue2)); + + assertFalse(Values.isDecimal128Value(minKeyValue)); + assertFalse(Values.isDecimal128Value(maxKeyValue)); + assertFalse(Values.isDecimal128Value(int32Value)); + assertTrue(Values.isDecimal128Value(decimal128)); + assertFalse(Values.isDecimal128Value(regexValue)); + assertFalse(Values.isDecimal128Value(bsonTimestampValue)); + assertFalse(Values.isDecimal128Value(bsonObjectIdValue)); + assertFalse(Values.isDecimal128Value(bsonBinaryDataValue1)); + assertFalse(Values.isDecimal128Value(bsonBinaryDataValue2)); + + assertFalse(Values.isRegexValue(minKeyValue)); + assertFalse(Values.isRegexValue(maxKeyValue)); + assertFalse(Values.isRegexValue(int32Value)); + assertFalse(Values.isRegexValue(decimal128)); + assertTrue(Values.isRegexValue(regexValue)); + assertFalse(Values.isRegexValue(bsonTimestampValue)); + assertFalse(Values.isRegexValue(bsonObjectIdValue)); + assertFalse(Values.isRegexValue(bsonBinaryDataValue1)); + assertFalse(Values.isRegexValue(bsonBinaryDataValue2)); + + assertFalse(Values.isBsonTimestamp(minKeyValue)); + assertFalse(Values.isBsonTimestamp(maxKeyValue)); + assertFalse(Values.isBsonTimestamp(int32Value)); + assertFalse(Values.isBsonTimestamp(decimal128)); + assertFalse(Values.isBsonTimestamp(regexValue)); + assertTrue(Values.isBsonTimestamp(bsonTimestampValue)); + assertFalse(Values.isBsonTimestamp(bsonObjectIdValue)); + assertFalse(Values.isBsonTimestamp(bsonBinaryDataValue1)); + assertFalse(Values.isBsonTimestamp(bsonBinaryDataValue2)); + + assertFalse(Values.isBsonObjectId(minKeyValue)); + assertFalse(Values.isBsonObjectId(maxKeyValue)); + assertFalse(Values.isBsonObjectId(int32Value)); + assertFalse(Values.isBsonObjectId(decimal128)); + assertFalse(Values.isBsonObjectId(regexValue)); + assertFalse(Values.isBsonObjectId(bsonTimestampValue)); + assertTrue(Values.isBsonObjectId(bsonObjectIdValue)); + assertFalse(Values.isBsonObjectId(bsonBinaryDataValue1)); + assertFalse(Values.isBsonObjectId(bsonBinaryDataValue2)); + + assertFalse(Values.isBsonBinaryData(minKeyValue)); + assertFalse(Values.isBsonBinaryData(maxKeyValue)); + assertFalse(Values.isBsonBinaryData(int32Value)); + assertFalse(Values.isBsonBinaryData(decimal128)); + assertFalse(Values.isBsonBinaryData(regexValue)); + assertFalse(Values.isBsonBinaryData(bsonTimestampValue)); + assertFalse(Values.isBsonBinaryData(bsonObjectIdValue)); + assertTrue(Values.isBsonBinaryData(bsonBinaryDataValue1)); + assertTrue(Values.isBsonBinaryData(bsonBinaryDataValue2)); assertEquals(Values.detectMapRepresentation(minKeyValue), MapRepresentation.MIN_KEY); assertEquals(Values.detectMapRepresentation(maxKeyValue), MapRepresentation.MAX_KEY); assertEquals(Values.detectMapRepresentation(int32Value), MapRepresentation.INT32); + assertEquals(Values.detectMapRepresentation(decimal128), MapRepresentation.DECIMAL128); assertEquals(Values.detectMapRepresentation(regexValue), MapRepresentation.REGEX); assertEquals( Values.detectMapRepresentation(bsonTimestampValue), MapRepresentation.BSON_TIMESTAMP); diff --git a/firebase-firestore/src/test/java/com/google/firebase/firestore/remote/RemoteSerializerTest.java b/firebase-firestore/src/test/java/com/google/firebase/firestore/remote/RemoteSerializerTest.java index 26f665b38f2..d2c31db9623 100644 --- a/firebase-firestore/src/test/java/com/google/firebase/firestore/remote/RemoteSerializerTest.java +++ 
b/firebase-firestore/src/test/java/com/google/firebase/firestore/remote/RemoteSerializerTest.java @@ -435,6 +435,21 @@ public void testEncodesInt32Values() { assertRoundTrip(model, proto, Value.ValueTypeCase.MAP_VALUE); } + @Test + public void testEncodesDecimal128Values() { + Value model = wrap(new com.google.firebase.firestore.Decimal128Value("1e3")); + + Value proto = + Value.newBuilder() + .setMapValue( + MapValue.newBuilder() + .putFields("__decimal128__", Value.newBuilder().setStringValue("1e3").build()) + .build()) + .build(); + + assertRoundTrip(model, proto, Value.ValueTypeCase.MAP_VALUE); + } + @Test public void testEncodesMinKey() { Value model = wrap(MinKey.instance());